1. Test Modules
  2. Training Characteristics
    1. Input Learning
      1. Gradient Descent
      2. Conjugate Gradient Descent
      3. Limited-Memory BFGS
    2. Results
  3. Results

Subreport: Logs for com.simiacryptus.ref.lang.ReferenceCountingBase

Test Modules

Using Seed 5604097085235062784

Training Characteristics

Input Learning

In this test, we use the network to learn this target input, given its pre-evaluated output:

TrainingTester.java:332 executed in 0.03 seconds (0.000 gc):

    return RefArrays.stream(RefUtil.addRef(input_target)).flatMap(RefArrays::stream).map(x -> {
      try {
        return x.prettyPrint();
      } finally {
        x.freeRef();
      }
    }).reduce((a, b) -> a + "\n" + b).orElse("");

Returns

    [
    	[ [ -0.88, 1.644, -1.588 ], [ -0.66, -0.504, -1.688 ], [ -0.724, 0.46, -0.856 ], [ 0.184, 1.92, -1.208 ], [ -0.68, -1.584, 1.904 ], [ 1.08, 0.76, 1.716 ], [ 0.036, 0.848, 1.324 ], [ -0.66, -0.832, -0.452 ], ... ],
    	[ [ -0.976, -1.032, -0.38 ], [ -1.768, 0.052, 0.08 ], [ -1.3, 0.024, -0.824 ], [ -1.34, 1.936, 0.032 ], [ 0.716, -0.9, 0.676 ], [ -0.412, 0.528, 0.648 ], [ -1.896, 0.976, -1.944 ], [ 0.868, 1.24, 0.54 ], ... ],
    	[ [ -0.444, 0.028, -0.088 ], [ 0.204, -1.4, -0.096 ], [ -0.484, -0.264, 1.276 ], [ 1.564, 0.392, -0.432 ], [ -0.34, 0.592, 1.888 ], [ 0.0, -1.632, -0.192 ], [ -0.804, 0.136, 0.52 ], [ 1.584, -1.184, -1.732 ], ... ],
    	[ [ 0.516, -0.816, -1.284 ], [ -0.7, -0.676, 1.472 ], [ 0.6, -0.152, -1.384 ], [ -0.24, 0.092, 1.36 ], [ -1.36, 1.528, 1.624 ], [ 0.216, -1.348, 0.024 ], [ 1.836, -0.66, 1.992 ], [ -1.864, 1.208, 0.128 ], ... ],
    	[ [ 0.568, -0.632, -1.016 ], [ -1.084, -0.236, -1.092 ], [ -0.352, 1.488, 1.872 ], [ -1.532, 1.956, 1.828 ], [ 0.376, -0.664, 0.864 ], [ 0.9, 0.016, -1.496 ], [ 1.328, 0.324, 0.868 ], [ 0.572, -0.344, 1.168 ], ... ],
    	[ [ 0.556, 0.4, -1.636 ], [ 0.044, -1.744, 0.904 ], [ 1.156, -0.596, -1.684 ], [ -0.26, 0.24, 1.312 ], [ 0.744, -1.472, 0.152 ], [ -0.196, -0.528, 0.588 ], [ -1.932, 0.952, -0.408 ], [ -0.472, 1.12, -1.12 ], ... ],
    	[ [ -1.484, -1.1, -1.36 ], [ 0.064, 0.892, 1.02 ], [ -1.26, 0.744, -0.644 ], [ -0.596, -1.828, -0.928 ], [ -1.904, 1.108, -1.852 ], [ -1.892, -1.176, -0.6 ], [ 0.164, 1.684, -1.176 ], [ -0.204, 1.06, 1.588 ], ... ],
    	[ [ 0.28, 0.748, -0.86 ], [ 0.244, -0.648, -1.5 ], [ -0.952, -1.264, 0.528 ], [ -1.052, 0.112, 0.428 ], [ -0.492, 0.076, -0.18 ], [ 0.956, 0.988, -0.516 ], [ 0.28, 0.768, -0.452 ], [ 0.432, 1.696, -1.8 ], ... ],
    	...
    ]
    [
    	[ [ -0.152, 1.712, 1.784 ], [ -1.912, -1.264, -0.62 ], [ -1.304, 0.044, -1.548 ], [ 0.18, 0.4, -1.244 ], [ 1.488, -1.672, 1.664 ], [ 1.612, 0.004, 0.132 ], [ -1.9, 0.516, -1.184 ], [ 1.044, -1.916, -0.692 ], ... ],
    	[ [ 0.704, 1.664, -0.992 ], [ 1.012, -0.46, 0.928 ], [ 0.912, 1.348, 0.528 ], [ 1.176, 1.16, -0.352 ], [ -1.148, -1.528, 0.304 ], [ 1.444, 0.644, -1.288 ], [ -0.136, 1.68, 1.488 ], [ -1.416, 1.028, 0.624 ], ... ],
    	[ [ -1.344, -1.848, -0.344 ], [ -0.468, -1.848, 0.22 ], [ 0.864, 0.628, 0.868 ], [ 0.956, -1.96, -0.2 ], [ -1.344, 1.716, 1.852 ], [ -1.12, 1.308, 0.472 ], [ -0.756, 1.52, 0.26 ], [ -1.724, 0.816, 0.704 ], ... ],
    	[ [ 1.78, 0.896, 0.58 ], [ -1.36, 0.064, 1.208 ], [ 1.948, -1.008, -0.456 ], [ 1.18, -0.428, 0.392 ], [ 0.216, -0.98, -0.376 ], [ -1.76, 0.232, 0.004 ], [ 1.772, 1.552, -1.372 ], [ -1.64, 1.952, -1.256 ], ... ],
    	[ [ 0.044, 0.008, 1.576 ], [ -0.228, 1.856, 0.232 ], [ 1.128, -1.208, 0.476 ], [ 0.9, -1.028, 1.824 ], [ 1.792, 0.46, 1.436 ], [ -1.904, 0.256, 0.14 ], [ 1.884, -1.072, 1.868 ], [ -1.2, -0.508, -1.252 ], ... ],
    	[ [ 1.64, -0.704, -0.904 ], [ -1.508, -1.7, 0.0 ], [ -0.56, 0.196, -0.076 ], [ -1.08, 0.404, -0.576 ], [ -0.052, 0.204, -1.036 ], [ -0.3, -1.14, -1.552 ], [ 0.548, -1.212, 0.872 ], [ 1.98, -0.616, -0.648 ], ... ],
    	[ [ 1.652, 1.06, -1.92 ], [ -0.932, 0.928, -0.48 ], [ -1.772, -0.628, 0.572 ], [ 1.168, 1.02, 1.052 ], [ 1.008, -1.296, 0.464 ], [ 0.452, -0.484, 1.316 ], [ -0.08, -1.088, 0.28 ], [ -0.872, 0.164, 0.224 ], ... ],
    	[ [ -0.596, 1.812, -1.06 ], [ -0.604, -0.22, 0.94 ], [ -1.088, -1.964, -1.1 ], [ 1.984, -1.116, -1.764 ], [ -1.376, 0.944, -0.284 ], [ 0.18, 0.828, -0.892 ], [ -1.412, -1.048, -1.8 ], [ -0.98, 1.108, -1.632 ], ... ],
    	...
    ]
    [
    	[ [ -1.164, 1.32, -0.488 ], [ 1.112, 0.22, -1.492 ], [ 1.756, 1.968, -0.756 ], [ 0.256, -0.244, -1.272 ], [ 0.856, 0.284, -1.4 ], [ -0.836, -0.428, -0.648 ], [ 1.768, -1.544, -0.328 ], [ 1.184, -0.868, -1.06 ], ... ],
    	[ [ -1.2, -0.104, -1.568 ], [ -0.572, 0.796, -0.244 ], [ -0.164, 1.92, -1.424 ], [ -0.656, -0.948, -0.664 ], [ -1.52, 0.444, -0.3 ], [ 0.092, 0.008, 1.36 ], [ 0.148, 0.208, -1.788 ], [ -0.248, -0.408, -1.196 ], ... ],
    	[ [ -0.468, -1.224, -1.272 ], [ 1.796, -0.58, -1.772 ], [ 1.716, -1.572, -1.044 ], [ 0.408, 1.812, 1.904 ], [ -0.364, 0.716, -1.896 ], [ 1.4, -0.3, -1.54 ], [ -0.168, -0.796, -0.864 ], [ -1.952, 0.832, -0.22 ], ... ],
    	[ [ 1.28, -1.016, -1.26 ], [ -1.38, -1.124, -0.504 ], [ -0.748, -1.636, -0.448 ], [ 0.104, 0.704, 1.9 ], [ -1.168, -1.656, 1.712 ], [ 0.028, -0.772, 0.416 ], [ -1.452, 1.232, -1.768 ], [ 0.552, -0.356, -1.368 ], ... ],
    	[ [ -1.104, -1.48, 0.708 ], [ 0.304, -0.564, -0.02 ], [ 1.264, -0.532, 0.16 ], [ 0.56, -0.392, 0.216 ], [ 0.852, 1.24, -0.58 ], [ -0.256, -1.048, 1.812 ], [ -1.572, -1.532, -1.172 ], [ -1.488, 1.82, -1.04 ], ... ],
    	[ [ -1.12, 1.836, -1.176 ], [ -1.896, 1.38, -1.856 ], [ 1.052, -0.844, -1.992 ], [ -0.464, -1.168, -0.444 ], [ -1.04, -0.016, -1.188 ], [ -0.4, 1.992, -0.3 ], [ 1.692, 1.9, 0.804 ], [ -0.108, 0.464, -1.348 ], ... ],
    	[ [ -0.088, -0.524, 1.244 ], [ 0.036, 1.912, 1.516 ], [ -1.468, -0.248, -1.748 ], [ 0.384, 0.544, 1.228 ], [ 0.436, 0.388, 0.944 ], [ -1.04, -1.18, -0.716 ], [ -0.5, -1.124, -0.304 ], [ -1.068, 1.524, -1.2 ], ... ],
    	[ [ 0.36, -0.472, -1.204 ], [ 1.172, -1.24, 1.14 ], [ 1.344, -0.04, -1.616 ], [ 1.8, -0.72, 1.564 ], [ -1.0, 1.136, 1.824 ], [ 1.448, -1.208, 0.82 ], [ -0.628, 1.216, 0.388 ], [ -1.276, 0.152, 1.048 ], ... ],
    	...
    ]
    [
    	[ [ -0.888, 1.064, 0.728 ], [ -1.372, 1.144, 1.132 ], [ -0.036, 0.728, 1.136 ], [ 0.172, 1.744, -0.876 ], [ -1.372, 0.508, 1.068 ], [ 0.352, 0.484, 1.884 ], [ -1.76, -1.288, -0.208 ], [ -1.28, -0.292, 1.98 ], ... ],
    	[ [ -0.284, -0.316, -0.88 ], [ 1.884, 1.344, 0.432 ], [ -1.624, -0.644, -1.748 ], [ 1.16, -0.008, 0.988 ], [ 1.424, -0.772, -1.232 ], [ 0.252, -0.652, -1.4 ], [ -1.912, -1.404, -1.932 ], [ 0.54, 0.356, 1.96 ], ... ],
    	[ [ 0.788, -1.108, -0.944 ], [ 0.676, 0.492, 1.244 ], [ 0.628, -0.332, 1.364 ], [ 1.812, -1.884, 1.08 ], [ 0.824, 0.096, -1.28 ], [ -0.132, -1.94, -1.236 ], [ -0.048, -0.532, 0.888 ], [ 1.68, -1.304, -1.536 ], ... ],
    	[ [ -0.108, 1.924, -0.652 ], [ -0.616, 0.364, 1.292 ], [ -0.084, 1.74, -0.332 ], [ 0.464, -1.728, 0.924 ], [ 1.476, 1.74, -0.332 ], [ 1.244, -0.264, -1.008 ], [ -0.652, 1.156, -1.208 ], [ 1.908, -1.992, 1.652 ], ... ],
    	[ [ 1.78, -0.532, 0.336 ], [ -0.644, 0.3, 1.932 ], [ 0.788, 0.612, -1.596 ], [ -1.328, 1.656, -1.544 ], [ -1.592, -0.82, -0.388 ], [ 0.652, -1.8, -0.52 ], [ 1.324, 1.188, 0.388 ], [ -1.048, 1.28, -1.152 ], ... ],
    	[ [ 0.488, -1.468, -1.764 ], [ 1.736, 0.232, 1.272 ], [ -1.656, 1.152, -1.912 ], [ -0.676, 1.872, 1.82 ], [ -1.86, 0.104, 1.2 ], [ 0.288, -1.688, 0.768 ], [ -0.864, -1.904, 1.716 ], [ 0.516, 1.748, 0.724 ], ... ],
    	[ [ -1.86, 1.016, 0.916 ], [ -0.248, 0.376, 0.02 ], [ 1.408, -1.172, -1.86 ], [ 1.432, -0.788, 0.084 ], [ -0.348, -0.82, -1.156 ], [ -0.968, -0.892, -0.352 ], [ 1.916, 0.448, -1.128 ], [ -1.232, -1.848, 1.704 ], ... ],
    	[ [ -1.788, 0.564, 1.104 ], [ 0.724, -0.476, -1.696 ], [ -1.752, -0.992, -1.964 ], [ 1.188, 0.02, 0.48 ], [ 1.152, 1.572, -1.056 ], [ -0.084, 1.868, -0.892 ], [ 0.748, -0.628, 1.36 ], [ -1.988, 0.004, -0.18 ], ... ],
    	...
    ]
    [
    	[ [ 1.932, -1.22, -1.84 ], [ -0.3, 0.0, -0.172 ], [ -0.444, -1.536, -1.512 ], [ 1.716, -0.996, -1.796 ], [ -1.06, -0.184, 0.792 ], [ -1.672, -0.708, 1.24 ], [ -0.764, 1.176, 1.84 ], [ 1.696, -0.356, 1.612 ], ... ],
    	[ [ 1.36, 0.22, 1.776 ], [ 1.636, -1.836, -1.46 ], [ -0.86, -0.196, 1.788 ], [ -0.104, 0.172, 1.74 ], [ 1.844, -0.508, -0.592 ], [ -1.968, -0.444, -1.584 ], [ 0.084, -0.668, -1.408 ], [ -0.988, 1.84, 1.06 ], ... ],
    	[ [ 1.136, -1.7, 0.44 ], [ 0.144, -1.212, 1.876 ], [ -0.516, 0.88, -0.408 ], [ -0.296, -0.74, -1.16 ], [ 0.788, 0.98, 1.328 ], [ 1.196, -0.076, 0.448 ], [ -0.868, -1.204, 1.16 ], [ 1.596, -1.76, -0.308 ], ... ],
    	[ [ 0.588, 0.136, 0.264 ], [ 0.388, -1.036, -1.936 ], [ -0.644, 1.868, -0.936 ], [ 1.5, 0.548, 1.532 ], [ 0.708, 0.42, -0.488 ], [ 0.008, -0.18, 0.824 ], [ -0.148, -0.112, -0.144 ], [ -0.556, 1.476, 0.816 ], ... ],
    	[ [ -0.872, 0.26, -1.84 ], [ -0.132, -1.924, -0.996 ], [ -0.924, -1.936, 1.808 ], [ -0.176, 0.476, -1.94 ], [ 0.996, -1.168, -0.452 ], [ -0.272, -1.448, 0.208 ], [ -0.848, -0.16, -1.204 ], [ -1.04, -0.16, -1.344 ], ... ],
    	[ [ 0.188, -1.772, -1.844 ], [ 0.888, 1.008, -1.624 ], [ -0.732, -0.42, -1.784 ], [ -0.756, -0.168, -1.412 ], [ 1.072, 1.756, -0.34 ], [ -0.18, -0.016, -0.256 ], [ 0.64, 1.932, -1.82 ], [ 1.336, -1.28, -1.072 ], ... ],
    	[ [ 0.772, 1.084, 1.908 ], [ 1.688, -0.548, -1.56 ], [ -0.716, 0.26, -1.232 ], [ 1.2, -0.624, 1.94 ], [ 1.376, 0.14, -1.544 ], [ -1.036, -0.28, 1.94 ], [ -1.412, -0.272, 0.124 ], [ 1.484, 0.808, -0.664 ], ... ],
    	[ [ -0.58, -1.44, -1.912 ], [ 1.132, -0.376, -0.568 ], [ 0.736, 1.696, -0.3 ], [ 0.368, 0.304, 1.36 ], [ -0.948, -1.872, -1.068 ], [ 1.704, 0.224, -1.236 ], [ 1.728, 1.3, 1.628 ], [ -0.832, 1.636, 1.388 ], ... ],
    	...
    ]
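
The optimizer's job in this section is to reproduce the pre-evaluated output above by adjusting only the input; the network weights stay fixed. As a minimal standalone sketch of that idea (plain Java, with a hypothetical element-wise square standing in for the network rather than the layer under test):

    // Minimal sketch of input learning: the model is fixed, the input is the trainable variable.
    // The element-wise square used as the "network" here is an illustrative assumption.
    public class InputLearningSketch {
      public static void main(String[] args) {
        double[] target = {0.25, 1.0, 2.25};   // pre-evaluated output to reproduce
        double[] input = {1.0, -2.0, 0.1};     // optimized in place; no weights are touched
        double lr = 0.05;
        for (int iter = 0; iter < 500; iter++) {
          for (int i = 0; i < input.length; i++) {
            double err = input[i] * input[i] - target[i];   // fixed forward pass
            input[i] -= lr * 4 * err * input[i];            // d/dx (x^2 - t)^2 = 4x(x^2 - t)
          }
        }
        System.out.println(java.util.Arrays.toString(input));
      }
    }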

Gradient Descent

First, we train using the basic gradient descent method with weak line search conditions.
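
The ArmijoWolfeSearch configured below accepts a trial step once both the Armijo sufficient-decrease condition and the weak Wolfe curvature condition hold; the WOLFE (weak) entries in the log appear to mark trial steps where the curvature condition is not yet satisfied, so the search keeps extending the step. A standalone check of the two conditions, assuming the conventional constants c1 = 1e-4 and c2 = 0.9 (the actual ArmijoWolfeSearch defaults may differ, and the message strings are not the library's log format):

    // Illustrative Armijo / weak-Wolfe classification of a trial step.
    // f0, d0: objective value and directional derivative at step 0; fT, dT: at trial step th.
    public class WolfeCheck {
      static String classify(double th, double f0, double d0, double fT, double dT,
                             double c1, double c2) {
        boolean armijo = fT <= f0 + c1 * th * d0;   // sufficient decrease
        boolean weakWolfe = dT >= c2 * d0;          // curvature condition, weak form
        if (!armijo) return "Armijo violated: shrink the step";
        if (!weakWolfe) return "weak Wolfe not met: extend the step";
        return "step accepted";
      }

      public static void main(String[] args) {
        // Values shaped like the first trial step in the log below (f ~ 1.9379, dx ~ -4.0e-7).
        System.out.println(classify(2.154, 1.9378774646, -3.997e-7, 1.9378766035, -3.997e-7,
            1e-4, 0.9));
      }
    }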

TrainingTester.java:480 executed in 31.48 seconds (2.814 gc):

    IterativeTrainer iterativeTrainer = new IterativeTrainer(trainable.addRef());
    try {
      iterativeTrainer.setLineSearchFactory(label -> new ArmijoWolfeSearch());
      iterativeTrainer.setOrientation(new GradientDescent());
      iterativeTrainer.setMonitor(TrainingTester.getMonitor(history));
      iterativeTrainer.setTimeout(30, TimeUnit.SECONDS);
      iterativeTrainer.setMaxIterations(250);
      iterativeTrainer.setTerminateThreshold(0);
      return iterativeTrainer.run();
    } finally {
      iterativeTrainer.freeRef();
    }
Logging
Reset training subject: 8530738830264
Reset training subject: 8531608076065
Constructing line search parameters: GD
th(0)=1.9378774646408345;dx=-3.9972297796401945E-7
New Minimum: 1.9378774646408345 > 1.9378766034639274
WOLFE (weak): th(2.154434690031884)=1.9378766034639274; dx=-3.997228454886349E-7 evalInputDelta=8.611769071187325E-7
New Minimum: 1.9378766034639274 > 1.9378757422873054
WOLFE (weak): th(4.308869380063768)=1.9378757422873054; dx=-3.9972271301327653E-7 evalInputDelta=1.7223535291321923E-6
New Minimum: 1.9378757422873054 > 1.9378722975836713
WOLFE (weak): th(12.926608140191302)=1.9378722975836713; dx=-3.997221831121045E-7 evalInputDelta=5.167057163246724E-6
New Minimum: 1.9378722975836713 > 1.9378567964738296
WOLFE (weak): th(51.70643256076521)=1.9378567964738296; dx=-3.9971979856200815E-7 evalInputDelta=2.0668167004966875E-5
New Minimum: 1.9378567964738296 > 1.9377741254497522
WOLFE (weak): th(258.53216280382605)=1.9377741254497522; dx=-3.9970708110459483E-7 evalInputDelta=1.0333919108229672E-4
New Minimum: 1.9377741254497522 > 1.9372574911394032
WOLFE (weak): th(1551.1929768229563)=1.9372574911394032; dx=-3.9962760245678594E-7 evalInputDelta=6.199735014313923E-4
New Minimum: 1.9372574911394032 > 1.9335407560354732
WOLFE (weak): th(10858.350837760694)=1.9335407560354732; dx=-3.9905563435861937E-7 evalInputDelta=0.004336708605361395
New Minimum: 1.9335407560354732 > 1.903386183082749
WOLFE (weak): th(86866.80670208555)=1.903386183082749; dx=-3.944029614306556E-7 evalInputDelta=0.03449128155808556
New Minimum: 1.903386183082749 > 1.643697041217867
END: th(781801.26031877)=1.643697041217867; dx=-3.5347174025816194E-7 evalInputDelta=0.29418042342296746
Fitness changed from 1.9378774646408345 to 1.643697041217867
Iteration 1 complete. Error: 1.643697041217867 Total: 18.1106; Orientation: 0.8506; Line Search: 15.0831
th(0)=1.643697041217867;dx=-3.137109421927425E-7
New Minimum: 1.643697041217867 > 1.1715536994054105
END: th(1684339.7559414052)=1.1715536994054105; dx=-2.489960547589457E-7 evalInputDelta=0.47214334181245654
Fitness changed from 1.643697041217867 to 1.1715536994054105
Iteration 2 complete. Error: 1.1715536994054105 Total: 5.0136; Orientation: 1.0191; Line Search: 3.3341
th(0)=1.1715536994054105;dx=-2.0053992586818306E-7
New Minimum: 1.1715536994054105 > 0.5635195986072014
END: th(3628800.0)=0.5635195986072014; dx=-1.368068526393922E-7 evalInputDelta=0.6080341007982092
Fitness changed from 1.1715536994054105 to 0.5635195986072014
Iteration 3 complete. Error: 0.5635195986072014 Total: 4.1687; Orientation: 1.0301; Line Search: 2.4462
th(0)=0.5635195986072014;dx=-9.726524006937596E-8
New Minimum: 0.5635195986072014 > 0.055516416269176506
END: th(7818012.6031877)=0.055516416269176506; dx=-2.4548682552389845E-8 evalInputDelta=0.5080031823380249
Fitness changed from 0.5635195986072014 to 0.055516416269176506
Iteration 4 complete. Error: 0.055516416269176506 Total: 4.1759; Orientation: 1.0252; Line Search: 2.4732
Final threshold in iteration 4: 0.055516416269176506 (> 0.0) after 31.470s (< 30.000s)

Returns

    0.055516416269176506

This training run resulted in the following configuration:

TrainingTester.java:610 executed in 0.00 seconds (0.000 gc):

    RefList<double[]> state = network.state();
    assert state != null;
    String description = state.stream().map(RefArrays::toString).reduce((a, b) -> a + "\n" + b)
        .orElse("");
    state.freeRef();
    return description;

Returns

    

And the regressed input:

TrainingTester.java:622 executed in 0.01 seconds (0.000 gc):

    return RefArrays.stream(RefUtil.addRef(data)).flatMap(x -> {
      return RefArrays.stream(x);
    }).limit(1).map(x -> {
      String temp_18_0015 = x.prettyPrint();
      x.freeRef();
      return temp_18_0015;
    }).reduce((a, b) -> a + "\n" + b).orElse("");

Returns

    [
    	[ [ -1.326967887421167, 0.8720917743820548, -1.6171238869608877 ], [ 0.8822387396043334, 1.0348752205923795, -0.19711396019671262 ], [ -0.8713927520556917, 0.2795993786450688, -0.9042066265893771 ], [ -1.2554093075481851, 0.7727619015093747, -2.48135259396119 ], [ -1.5085989727046025, -1.4971118394609888, 1.8257108121655914 ], [ -0.2941922271171751, -0.6232527549930749, 0.01744498211025014 ], [ -1.5228834591621223, -0.8689469645785856, -0.6201695762592925 ], [ 0.9878146047042877, 0.7137187539808805, 1.022466641314832 ], ... ],
    	[ [ 0.06397477025759468, -0.05522963680125015, 0.22325486654365556 ], [ -1.399930477323312, 0.2075237937005062, 0.3404066836228059 ], [ 0.7984633275683659, 1.983349661002871, 1.0741870114287637 ], [ -1.5478892611166926, 1.725588652710098, -0.7336993915934052 ], [ 1.1828010652409289, -0.17594460365309172, 0.6931435384121627 ], [ 0.6305807223638056, 1.3759358061525049, 1.3454834714836894 ], [ -0.08083565517181146, 2.3871736127467145, -0.8423379575749033 ], [ -1.0257804095306413, -0.7260749409268512, -1.4201446495425074 ], ... ],
    	[ [ 0.11491000231774917, 0.6956460709601834, 0.6214439267220675 ], [ 0.03148890516862027, -1.5841877028775477, -0.31130120229107233 ], [ -0.8823377454703217, -0.6309207834745302, 0.9212585289448518 ], [ 0.5239874563169413, -0.5064525506538484, -0.893534905663093 ], [ -0.4629389602222357, 0.5951651345465175, 1.7797738256757183 ], [ 0.06289186914792683, -1.0565789479468521, -0.30631292120107456 ], [ -1.208118679976386, -0.2152070273104264, 0.12732570728681217 ], [ 2.7633247379035684, 0.16335383961420957, -1.0706785775177774 ], ... ],
    	[ [ 1.0697994105310173, -0.1807357161575233, -0.7850636943734942 ], [ 0.39201711272541184, -0.007748535059945272, 2.3877314223345336 ], [ 0.7819205148533684, 0.7382707358756109, -0.5041912507289796 ], [ 0.21635873849831783, 0.8484258016716277, 1.8832154598300546 ], [ -1.196404137744421, 0.8855402892059286, 1.5388638485384925 ], [ 0.08726184372215623, -0.9012633999588641, -0.2019984437632919 ], [ 0.5642963581120555, -2.024307182830678, 0.2480108247186229 ], [ -2.594563001697559, 0.5549073097528888, -0.8243443080553297 ], ... ],
    	[ [ 1.3430633119360562, 0.7046797734711383, 0.25625691459280575 ], [ -0.21059443886297655, 0.3403093046690735, -0.10971486580609685 ], [ -0.9003780294147624, 0.307657974877072, 0.4327200545376907 ], [ -1.7502145286670392, 1.897552650968108, 1.248661877698931 ], [ -0.5647619675019172, -1.3360653818040904, -0.2511726506939923 ], [ -0.3201895080709072, -1.0761141810584658, -2.303696310870627 ], [ 0.46918736570187214, -0.5907014083137077, 0.13351404261183558 ], [ 0.0016495369813429306, -1.0315560502487424, 0.39790651326739956 ], ... ],
    	[ [ 0.5547211699035279, 0.4919513837524431, -0.8346725536559713 ], [ 0.09372708170792121, -1.011583794957644, 1.1218567132497228 ], [ 1.4474293574495696, 0.451909170011849, -1.1153385274614187 ], [ -1.5447718962385024, -0.8727872533233733, -0.08644085043812422 ], [ 1.210910597356112, -0.9457418780000917, 0.40283128064397955 ], [ 0.420346573517027, -0.1119292869325631, 1.039582713415536 ], [ -0.8568869098211588, 1.0307279304786565, 0.4661589793425026 ], [ -1.667286747124852, 0.06698747868024568, -1.7757007315553932 ], ... ],
    	[ [ 0.5296566065141235, 1.0639727994781436, 0.7903705940077328 ], [ 0.6038949995652367, 1.38167266389794, 1.3264323365368236 ], [ -0.2249436322443233, 1.6529803493949176, 0.06396328284940589 ], [ 1.1828010281057306, -0.06084182305188264, 0.7660407949461521 ], [ -1.365117904740713, 2.247962437700526, -0.6788445329598127 ], [ -1.7477216272774685, -1.3420087387482451, -0.8302696339742863 ], [ 0.6658626198696703, 2.457937568814046, -0.35980018868371627 ], [ -0.5667667233887768, 0.6816477551835635, 1.2211189682052135 ], ... ],
    	[ [ -0.73637003906604, -0.3588492233961172, -1.8367807375378427 ], [ 1.2708911686355928, 0.5954545200797046, -0.3503456887152976 ], [ -0.18062853724797026, -0.2881720217110892, 1.4248005589590595 ], [ -0.19141319984247696, 0.3384787962814162, 0.952934403561061 ], [ 0.09233463601766101, 0.41937345241282065, 0.06829191156951842 ], [ 0.46473889407138097, 0.218106661898968, -0.5588455559703489 ], [ 0.9985881908629729, 1.5922841490484818, 0.39712766008854505 ], [ 0.007523046715959991, 0.5466244529577912, -2.706147499673751 ], ... ],
    	...
    ]

To produce the following output:

TrainingTester.java:633 executed in 0.07 seconds (0.000 gc):

    Result[] array = ConstantResult.batchResultArray(pop(RefUtil.addRef(data)));
    @Nullable
    Result eval = layer.eval(array);
    assert eval != null;
    TensorList tensorList = Result.getData(eval);
    String temp_18_0016 = tensorList.stream().limit(1).map(x -> {
      String temp_18_0017 = x.prettyPrint();
      x.freeRef();
      return temp_18_0017;
    }).reduce((a, b) -> a + "\n" + b).orElse("");
    tensorList.freeRef();
    return temp_18_0016;

Returns

    [
    	[ [ -2.3762701973720524, -0.1772105355688306, -2.666426196911773 ], [ -0.918175696292546, -0.7655392153045, -1.9975283960935921 ], [ -1.63491976755985, -0.48392763685908946, -1.6677336420935354 ], [ -2.1853381286596623, -0.15716691960210216, -3.4112814150726667 ], [ -3.403546724769331, -3.392059591525717, -0.06923693989913704 ], [ -1.1266354834091161, -1.4556960112850157, -0.8149982741816908 ], [ -1.6844309921221279, -1.0304944975385912, -0.7817171092192982 ], [ -1.0280176400629997, -1.302113490786407, -0.9933656034524554 ], ... ],
    	[ [ -1.118510566717636, -1.237714973776481, -0.9592304704315753 ], [ -2.4586787091062208, -0.8512244380824027, -0.718341548160103 ], [ -1.7205854433512437, -0.5356991099167387, -1.444861759490846 ], [ -3.389811252172099, -0.11633333834530823, -2.5756213826488117 ], [ -0.6258417175063418, -1.9845873864003623, -1.115499244335108 ], [ -1.63922531455783, -0.8938702307691306, -0.9243225654379461 ], [ -2.585196781464436, -0.11718751354591017, -3.346699083867528 ], [ -1.1064381745708838, -0.8067327059670938, -1.50080241458275 ], ... ],
    	[ [ -1.4922033390092326, -0.9114672703667984, -0.9856694146049143 ], [ -0.6463402913412902, -2.2620168993874583, -0.9891303988009827 ], [ -2.1231340857359826, -1.8717171237401908, -0.31953781132080894 ], [ -0.46948071056275653, -1.4999207175335463, -1.8870030725427909 ], [ -2.587745378415121, -1.5296412836463675, -0.3450325925171669 ], [ -0.7019762541790098, -1.8214470712737887, -1.0711810445280112 ], [ -2.015005258343595, -1.0220936056776353, -0.6795608710803968 ], [ -0.09157468302269622, -2.691545581312055, -3.9255779984440418 ], ... ],
    	[ [ -0.36660360951279003, -1.6171387362013308, -2.2214667144173017 ], [ -2.2003235271525696, -2.6000891749379265, -0.20460921754344774 ], [ -0.8036291148139866, -0.8472788937917441, -2.0897408803963344 ], [ -2.1013243716991497, -1.4692573085258396, -0.43446765036741275 ], [ -3.195972402472128, -1.1140279755217786, -0.46070441618921465 ], [ -0.7518604958806172, -1.7403857395616376, -1.0411207833660652 ], [ -0.5899932201940297, -3.178596761136763, -0.9062787535874622 ], [ -3.4077043131894547, -0.2582340017390067, -1.6374856195472252 ], ... ],
    	[ [ -0.623495563506421, -1.261879101971339, -1.7103019608496715 ], [ -1.3457232638657974, -0.7948195203337474, -1.2448436908089178 ], [ -2.096750919801725, -0.8887149155098906, -0.7636528358492719 ], [ -4.085166513877708, -0.4373993342425604, -1.0862901075117373 ], [ -1.0405373262974482, -1.8118407405996213, -0.7269480094895232 ], [ -0.47446996105735906, -1.2303946340449177, -2.457976763857079 ], [ -0.7233615650359497, -1.7832503390515295, -1.0590348881259863 ], [ -1.0445486958840955, -2.0777542831141806, -0.6482917195980389 ], ... ],
    	[ [ -0.7831642927967692, -0.845934078947854, -2.1725580163562683 ], [ -1.4175361246454459, -2.5228470013110114, -0.3894064931036443 ], [ -0.369231345656027, -1.3647515330937476, -2.9319992305670155 ], [ -1.981952562339688, -1.309967919424559, -0.5236215165393099 ], [ -0.4455989557364781, -2.602251431092682, -1.2536782724486106 ], [ -1.2368587450640551, -1.7691346055136452, -0.6176226051655462 ], [ -2.4299610842873722, -0.542346243987557, -1.1069151951237108 ], [ -2.0231450950662033, -0.2888708692611056, -2.1315590794967445 ], ... ],
    	[ [ -1.3873281978804464, -0.8530120049164263, -1.126614210386837 ], [ -1.655611762497935, -0.8778340981652317, -0.9330744255263481 ], [ -2.183224953384066, -0.3053009717448252, -1.8943180382903368 ], [ -0.6665520968706655, -1.9101949480282787, -1.083312330030244 ], [ -3.6905379779124665, -0.07745763547122797, -3.0042646061315663 ], [ -1.6100930091248262, -1.2043801205956028, -0.692641015821644 ], [ -1.9961213313648711, -0.20404638242049566, -3.0217841399182577 ], [ -2.347712904006279, -1.0992984254339389, -0.5598272124122889 ], ... ],
    	[ [ -1.0265426203585606, -0.6490218046886378, -2.1269533188303633 ], [ -0.53449618721252, -1.2099328357684083, -2.1557330445634104 ], [ -1.9283327844429503, -2.035876268906069, -0.3229036882359204 ], [ -1.7645836515362863, -1.234691655412393, -0.6202360481327482 ], [ -1.2128629619801785, -0.8858241455850188, -1.236905686428321 ], [ -0.7611481742362001, -1.007780406408613, -1.78473262427793 ], [ -1.2115477069395357, -0.6178517487540267, -1.8130082377139636 ], [ -1.0227237644678406, -0.4836223582260094, -3.7363943108575515 ], ... ],
    	...
    ]

Conjugate Gradient Descent

Next, we use a conjugate gradient descent method, which converges the fastest on purely quadratic functions.
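
For reference, textbook linear conjugate gradient minimizes a quadratic 0.5*x'Ax - b'x (equivalently, solves Ax = b) exactly in at most n iterations for an n-by-n symmetric positive definite A, which is the source of the fast-convergence claim; note that the executed snippet below actually pairs a GradientDescent orientation with a QuadraticSearch line search. A self-contained illustration of the generic textbook method, unrelated to the MindsEye implementation:

    // Standalone linear conjugate gradient on a small SPD system: minimizes 0.5*x'Ax - b'x,
    // i.e. solves Ax = b, and converges in at most n iterations.
    public class ConjugateGradientSketch {
      public static void main(String[] args) {
        double[][] A = {{4, 1, 0}, {1, 3, 1}, {0, 1, 2}};  // symmetric positive definite
        double[] b = {1, 2, 3};
        double[] x = new double[3];                        // start at the origin
        double[] r = b.clone();                            // residual r = b - Ax = b
        double[] p = r.clone();                            // initial search direction
        double rsOld = dot(r, r);
        for (int k = 0; k < b.length && Math.sqrt(rsOld) > 1e-12; k++) {
          double[] Ap = mul(A, p);
          double alpha = rsOld / dot(p, Ap);               // exact minimizer along p
          for (int i = 0; i < x.length; i++) { x[i] += alpha * p[i]; r[i] -= alpha * Ap[i]; }
          double rsNew = dot(r, r);
          double beta = rsNew / rsOld;                     // conjugate direction update
          for (int i = 0; i < p.length; i++) p[i] = r[i] + beta * p[i];
          rsOld = rsNew;
        }
        System.out.println(java.util.Arrays.toString(x));
      }
      static double dot(double[] a, double[] b) {
        double s = 0; for (int i = 0; i < a.length; i++) s += a[i] * b[i]; return s;
      }
      static double[] mul(double[][] A, double[] v) {
        double[] out = new double[A.length];
        for (int i = 0; i < A.length; i++) out[i] = dot(A[i], v);
        return out;
      }
    }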

TrainingTester.java:452 executed in 47.27 seconds (1.878 gc):

    IterativeTrainer iterativeTrainer = new IterativeTrainer(trainable.addRef());
    try {
      iterativeTrainer.setLineSearchFactory(label -> new QuadraticSearch());
      iterativeTrainer.setOrientation(new GradientDescent());
      iterativeTrainer.setMonitor(TrainingTester.getMonitor(history));
      iterativeTrainer.setTimeout(30, TimeUnit.SECONDS);
      iterativeTrainer.setMaxIterations(250);
      iterativeTrainer.setTerminateThreshold(0);
      return iterativeTrainer.run();
    } finally {
      iterativeTrainer.freeRef();
    }
Logging
Reset training subject: 8562527872192
Reset training subject: 8563241299847
Constructing line search parameters: GD
F(0.0) = LineSearchPoint{point=PointSample{avg=1.9378774646408345}, derivative=-3.9972297796401945E-7}
F(1.0E-10) = LineSearchPoint{point=PointSample{avg=1.9378774646408345}, derivative=-3.9972297796401945E-7}, evalInputDelta = 0.0
F(7.000000000000001E-10) = LineSearchPoint{point=PointSample{avg=1.9378774646408345}, derivative=-3.997229779640194E-7}, evalInputDelta = 0.0
New Minimum: 1.9378774646408345 > 1.9378774646408328
F(4.900000000000001E-9) = LineSearchPoint{point=PointSample{avg=1.9378774646408328}, derivative=-3.9972297796401914E-7}, evalInputDelta = -1.7763568394002505E-15
New Minimum: 1.9378774646408328 > 1.937877464640821
F(3.430000000000001E-8) = LineSearchPoint{point=PointSample{avg=1.937877464640821}, derivative=-3.9972297796401734E-7}, evalInputDelta = -1.354472090042691E-14
New Minimum: 1.937877464640821 > 1.937877464640739
F(2.4010000000000004E-7) = LineSearchPoint{point=PointSample{avg=1.937877464640739}, derivative=-3.997229779640047E-7}, evalInputDelta = -9.547918011776346E-14
New Minimum: 1.937877464640739 > 1.937877464640163
F(1.6807000000000003E-6) = LineSearchPoint{point=PointSample{avg=1.937877464640163}, derivative=-3.997229779639161E-7}, evalInputDelta = -6.714628852932947E-13
New Minimum: 1.937877464640163 > 1.937877464636132
F(1.1764900000000001E-5) = LineSearchPoint{point=PointSample{avg=1.937877464636132}, derivative=-3.99722977963296E-7}, evalInputDelta = -4.702460643102313E-12
New Minimum: 1.937877464636132 > 1.9378774646079158
F(8.235430000000001E-5) = LineSearchPoint{point=PointSample{avg=1.9378774646079158}, derivative=-3.997229779589555E-7}, evalInputDelta = -3.2918778813950667E-11
New Minimum: 1.9378774646079158 > 1.9378774644104027
F(5.764801000000001E-4) = LineSearchPoint{point=PointSample{avg=1.9378774644104027}, derivative=-3.9972297792857187E-7}, evalInputDelta = -2.3043189578686452E-10
New Minimum: 1.9378774644104027 > 1.9378774630278084
F(0.004035360700000001) = LineSearchPoint{point=PointSample{avg=1.9378774630278084}, derivative=-3.9972297771588666E-7}, evalInputDelta = -1.6130261570879156E-9
New Minimum: 1.9378774630278084 > 1.9378774533496501
F(0.028247524900000005) = LineSearchPoint{point=PointSample{avg=1.9378774533496501}, derivative=-3.997229762270895E-7}, evalInputDelta = -1.1291184431883039E-8
New Minimum: 1.9378774533496501 > 1.9378773856025426
F(0.19773267430000002) = LineSearchPoint{point=PointSample{avg=1.9378773856025426}, derivative=-3.9972296580551006E-7}, evalInputDelta = -7.903829191135969E-8
New Minimum: 1.9378773856025426 > 1.93787691137284
F(1.3841287201) = LineSearchPoint{point=PointSample{avg=1.93787691137284}, derivative=-3.997228928544583E-7}, evalInputDelta = -5.532679945297048E-7
New Minimum: 1.93787691137284 > 1.9378735917673438
F(9.688901040700001) = LineSearchPoint{point=PointSample{avg=1.9378735917673438}, derivative=-3.997223821973183E-7}, evalInputDelta = -3.87287349079557E-6
New Minimum: 1.9378735917673438 > 1.9378503546476165
F(67.8223072849) = LineSearchPoint{point=PointSample{avg=1.9378503546476165}, derivative=-3.997188076082183E-7}, evalInputDelta = -2.710999321808849E-5
New Minimum: 1.9378503546476165 > 1.9376877006279307
F(474.7561509943) = LineSearchPoint{point=PointSample{avg=1.9376877006279307}, derivative=-3.9969378601766304E-7}, evalInputDelta = -1.8976401290382583E-4
New Minimum: 1.9376877006279307 > 1.9365494075632483
F(3323.2930569601003) = LineSearchPoint{point=PointSample{avg=1.9365494075632483}, derivative=-3.9951866101417063E-7}, evalInputDelta = -0.0013280570775862088
New Minimum: 1.9365494075632483 > 1.9285953147815669
F(23263.0513987207) = LineSearchPoint{point=PointSample{avg=1.9285953147815669}, derivative=-3.9829406854271666E-7}, evalInputDelta = -0.009282149859267674
New Minimum: 1.9285953147815669 > 1.873597210229629
F(162841.3597910449) = LineSearchPoint{point=PointSample{avg=1.873597210229629}, derivative=-3.897855119755389E-7}, evalInputDelta = -0.06428025441120555
New Minimum: 1.873597210229629 > 1.520706840737654
F(1139889.5185373144) = LineSearchPoint{point=PointSample{avg=1.520706840737654}, derivative=-3.3360103276409784E-7}, evalInputDelta = -0.4171706239031805
New Minimum: 1.520706840737654 > 0.1811624877767998
F(7979226.6297612) = LineSearchPoint{point=PointSample{avg=0.1811624877767998}, derivative=-7.750883362447117E-8}, evalInputDelta = -1.7567149768640347
F(5.58545864083284E7) = LineSearchPoint{point=PointSample{avg=84.21794041434808}, derivative=3.782798175163346E-6}, evalInputDelta = 82.28006294970724
F(4296506.646794492) = LineSearchPoint{point=PointSample{avg=0.6988937386833485}, derivative=-1.989630760298462E-7}, evalInputDelta = -1.238983725957486
F(3.007554652756145E7) = LineSearchPoint{point=PointSample{avg=15.207547742932519}, derivative=1.5749136626357323E-6}, evalInputDelta = 13.269670278291684
F(2313503.579043188) = LineSearchPoint{point=PointSample{avg=1.1645780218201005}, derivative=-2.750268640080921E-7}, evalInputDelta = -0.773299442820734
F(1.6194525053302318E7) = LineSearchPoint{point=PointSample{avg=1.4128436592226208}, derivative=4.2931098087024724E-7}, evalInputDelta = -0.5250338054182138
1.4128436592226208 <= 1.9378774646408345
F(7808273.366897162) = LineSearchPoint{point=PointSample{avg=0.19498568564932034}, derivative=-8.418207308123576E-8}, evalInputDelta = -1.7428917789915142
Left bracket at 7808273.366897162
New Minimum: 0.1811624877767998 > 0.11827037081384639
F(9183115.824667387) = LineSearchPoint{point=PointSample{avg=0.11827037081384639}, derivative=-2.5459089131585673E-8}, evalInputDelta = -1.8196070938269882
Left bracket at 9183115.824667387
New Minimum: 0.11827037081384639 > 0.11196669940099149
F(9575630.865266077) = LineSearchPoint{point=PointSample{avg=0.11196669940099149}, derivative=-6.49538188833535E-9}, evalInputDelta = -1.825910765239843
Left bracket at 9575630.865266077
New Minimum: 0.11196669940099149 > 0.11156816461988146
F(9674280.745358797) = LineSearchPoint{point=PointSample{avg=0.11156816461988146}, derivative=-1.5740556905330355E-9}, evalInputDelta = -1.8263093000209532
Left bracket at 9674280.745358797
Converged to left
Fitness changed from 1.9378774646408345 to 0.11156816461988146
Iteration 1 complete. Error: 0.11156816461988146 Total: 47.2669; Orientation: 1.2616; Line Search: 43.7837
Final threshold in iteration 1: 0.11156816461988146 (> 0.0) after 47.268s (< 30.000s)

Returns

    0.11156816461988146

This training run resulted in the following configuration:

TrainingTester.java:610 executed in 0.00 seconds (0.000 gc):

    RefList<double[]> state = network.state();
    assert state != null;
    String description = state.stream().map(RefArrays::toString).reduce((a, b) -> a + "\n" + b)
        .orElse("");
    state.freeRef();
    return description;

Returns

    

And the regressed input:

TrainingTester.java:622 executed in 0.01 seconds (0.000 gc):

    return RefArrays.stream(RefUtil.addRef(data)).flatMap(x -> {
      return RefArrays.stream(x);
    }).limit(1).map(x -> {
      String temp_18_0015 = x.prettyPrint();
      x.freeRef();
      return temp_18_0015;
    }).reduce((a, b) -> a + "\n" + b).orElse("");

Returns

    [
    	[ [ -1.2551360276843389, 0.48235733454896246, -1.299221306864624 ], [ 0.934643456835893, 0.6377562738907128, 0.14760026927339442 ], [ -0.8765296479545172, 0.269159172340192, -0.8886295243856748 ], [ -1.3335726069336227, 0.9360875478593076, -2.566514940925685 ], [ -1.4559629624002295, -0.9848109571601595, 1.2607739195603893 ], [ -0.37687222681135296, -0.6609084628411117, 0.13778068965246448 ], [ -1.5738306978599794, -0.8858890917432396, -0.552280210396781 ], [ 0.8855821541458004, 0.7509785788118903, 1.0874392670423094 ], ... ],
    	[ [ -0.5932085085102017, 0.034667191372230066, 0.7905413171379718 ], [ -1.3637312974765103, 0.1814953060002219, 0.33023599147628846 ], [ 0.7914078393231864, 1.982921184884432, 1.081670975792382 ], [ -1.3750012290068414, 1.6414658141436318, -0.8224645851367908 ], [ 1.0736324887130042, -0.6125950896013279, 1.238962600888324 ], [ 0.5673715737243779, 1.3720961066688524, 1.4125323196067698 ], [ 0.3512213493237675, 1.87605774850284, -0.7632790978266074 ], [ -1.0420427288887582, -0.7204722661971837, -1.4094850049140581 ], ... ],
    	[ [ 0.3128634652601373, 0.6473086357596373, 0.4718278989802256 ], [ -0.026034623153295022, -1.5426062081119416, -0.2953591687347632 ], [ -0.9254916894243437, -0.6508560603420692, 0.9843477497664128 ], [ 0.5702361679155998, -0.4408772041957033, -1.0053589637198965 ], [ -0.49307150543324363, 0.6734860121414213, 1.731585493291822 ], [ 0.4686426879370873, -2.0646209012647727, 0.2959782133276858 ], [ -1.1615597035154632, -0.23897708933975054, 0.1045367928552138 ], [ 2.2868565067474407, 0.6746566784278236, -1.105513185175264 ], ... ],
    	[ [ 1.0860975750107489, -0.19551118098821457, -0.7865863940225344 ], [ 0.38834188632425026, 0.04884359454425169, 2.3348145191314984 ], [ 1.4017519377439265, 0.07460563560273892, -0.46035757334666494 ], [ 0.48265535197788667, 0.6519278398956119, 1.8134168081265014 ], [ -1.021479036082789, 0.9591542376780753, 1.290324798404714 ], [ 0.7673508426387221, -2.3734446939081533, 0.590093851269432 ], [ 0.5765981429223314, -2.050834051600493, 0.2622359086781616 ], [ -2.60098170837061, 0.6047289331939205, -0.8677472248233106 ], ... ],
    	[ [ 1.519708409484792, 0.5666184017744533, 0.21767318874075503 ], [ -0.2124793959462381, 0.5230018315981415, -0.2905224356519033 ], [ -1.9008275500971308, 0.7095099855573572, 1.0313175645397727 ], [ -1.6823022203339044, 1.9446514083480742, 1.1336508119858302 ], [ -0.5611673600495104, -1.4008544957493467, -0.18997814420114278 ], [ -0.42692933258407684, -1.1131864541366598, -2.1598842132792635 ], [ 0.4835029830095706, -0.19892240096544866, -0.27258058204412206 ], [ -0.037277735185184735, -1.0128321149277744, 0.4181098501129591 ], ... ],
    	[ [ 0.9946505723155619, 0.8652124138448519, -1.647862986160414 ], [ 0.20168875131161057, -1.0319070648380275, 1.0342183135264167 ], [ 1.317998105567323, 0.49158657627181823, -1.0255846818391416 ], [ -1.5370759626948494, -0.8709194800531319, -0.09600455725201884 ], [ 1.0779718822401336, -0.9207805707151492, 0.5108086884750155 ], [ 0.2967305280052097, 0.013602020528312386, 1.0376674514664779 ], [ -0.9465566893001514, 1.2739149472356952, 0.31264174206445605 ], [ -1.5935045638992416, -0.05173669806490766, -1.7307587380358505 ], ... ],
    	[ [ 0.8228566134802009, 0.7005425943041457, 0.8606007922156534 ], [ 0.5837655124670222, 1.3586225339368665, 1.3696119535961115 ], [ -0.21084254903745714, 1.6367987492829803, 0.06604379975447716 ], [ 1.0230131543764485, 0.021311942690709268, 0.8436749029328423 ], [ -1.4256798064169196, 1.9378396366274013, -0.308159830210482 ], [ -1.8637386803851477, -1.28502611297392, -0.7712352066409325 ], [ 0.5891245627790839, 2.6072624843749734, -0.43238704715405696 ], [ -0.46842965420448873, 0.7190125770109176, 1.0854170771935712 ], ... ],
    	[ [ -0.7215486308556199, -0.36444068885463543, -1.8460106802897447 ], [ 1.2492243496044841, 0.624304268685568, -0.3575286182900521 ], [ -0.20773001664727944, -0.278248846991775, 1.4419788636390543 ], [ -0.9618065207517863, 0.9371092015245883, 1.1246973192271976 ], [ -0.8174643775876824, 0.7902707088866066, 0.6071936687010766 ], [ 0.9030029743762394, 0.9439442390276251, -1.722947213403864 ], [ 1.0697029371005529, 1.4962590220097525, 0.42203804088969477 ], [ 0.12783132841660444, 0.437599725861614, -2.7174310542782187 ], ... ],
    	...
    ]

To produce the following output:

TrainingTester.java:633 executed in 0.08 seconds (0.000 gc):

    Result[] array = ConstantResult.batchResultArray(pop(RefUtil.addRef(data)));
    @Nullable
    Result eval = layer.eval(array);
    assert eval != null;
    TensorList tensorList = Result.getData(eval);
    String temp_18_0016 = tensorList.stream().limit(1).map(x -> {
      String temp_18_0017 = x.prettyPrint();
      x.freeRef();
      return temp_18_0017;
    }).reduce((a, b) -> a + "\n" + b).orElse("");
    tensorList.freeRef();
    return temp_18_0016;

Returns

    [
    	[ [ -2.0333913836701267, -0.29589802143682553, -2.0774766628504118 ], [ -0.7876918675520466, -1.0845790504972268, -1.5747350551145451 ], [ -1.635608400741701, -0.4899195804469921, -1.6477082771728588 ], [ -2.3949405203851883, -0.1252803655922583, -3.627882854377251 ], [ -2.8754107452313433, -2.404258739991273, -0.15867386327072452 ], [ -1.2313341681860959, -1.5153704042158547, -0.7166812517222785 ], [ -1.7521717482616883, -1.0642301421449485, -0.73062126079849 ], [ -1.1306713579093524, -1.2652749332432625, -0.9288142450128434 ], ... ],
    	[ [ -1.9262118335852856, -1.2983361337028538, -0.5424620079371121 ], [ -2.4096493867670388, -0.8644227832903063, -0.7156820978142397 ], [ -1.7279031438231134, -0.536389798261868, -1.437640007353918 ], [ -3.142283283746683, -0.12581624059620955, -2.589746639876632 ], [ -0.8607777847647118, -2.5470053630790437, -0.6954476725893919 ], [ -1.7163950037814644, -0.9116704708369898, -0.8712342578990724 ], [ -1.7787537591701201, -0.2539173599910476, -2.893254206320495 ], [ -1.122262241109865, -0.8006917784182906, -1.489704517135165 ], ... ],
    	[ [ -1.272414669418891, -0.9379694989193909, -1.1134502356988025 ], [ -0.6847915218127778, -2.2013631067714243, -0.9541160673942459 ], [ -2.204757870797259, -1.9301222417149846, -0.29491843160650255 ], [ -0.451520161661231, -1.462633533772534, -2.0271152932967276 ], [ -2.5998136608178206, -1.4332561432431556, -0.3751566620927549 ], [ -0.6527518423809129, -3.186015431582773, -0.8254163169903144 ], [ -1.9548357895454274, -1.0322531753697148, -0.6887392931747505 ], [ -0.20951289150408758, -1.8217127198237046, -3.601882583426792 ], ... ],
    	[ [ -0.358583764415342, -1.6401925204143055, -2.2312677334486253 ], [ -2.165167990161618, -2.504666281941616, -0.2186953573543695 ], [ -0.35106359225406125, -1.6782098943952488, -2.2131731033446527 ], [ -1.786473259149318, -1.6172007712315928, -0.45571180300070324 ], [ -2.9090814530037075, -0.9284481792428432, -0.5972776185162045 ], [ -0.6317040709207762, -3.7724996074676516, -0.8089610622900663 ], [ -0.5891847097453168, -3.216616904268141, -0.9035469439894865 ], [ -3.4446385197144362, -0.23892787814990546, -1.7114040361671365 ], ... ],
    	[ [ -0.5053258455914986, -1.4584158533018372, -1.8073610663355355 ], [ -1.3891429107980469, -0.6536616832536672, -1.467185950503712 ], [ -3.507701892328038, -0.8973643566735503, -0.5755567776911348 ], [ -4.012903039140356, -0.3859494104583783, -1.1969500068206222 ], [ -1.0582425943121079, -1.8979297300119442, -0.6870533784637404 ], [ -0.5189236980293208, -1.2051808195819038, -2.2518785787245075 ], [ -0.6805133811372303, -1.3629387651122495, -1.436596946190923 ], [ -1.0830815441800676, -2.0586359239226573, -0.6276939588819238 ], ... ],
    	[ [ -0.6677119523959019, -0.7971501108666118, -3.3102255108718777 ], [ -1.2782554313505514, -2.5118512475001893, -0.4457258691357453 ], [ -0.4276179007477674, -1.2540294300432722, -2.771200688154232 ], [ -1.9701791051600033, -1.3040226225182858, -0.5291076997171728 ], [ -0.5321777724024237, -2.5309302253577064, -1.0993409661675417 ], [ -1.348416758616325, -1.6315452660932221, -0.6074798351550568 ], [ -2.619894253418221, -0.3994226168823747, -1.3606958220536138 ], [ -1.8786391447596653, -0.3368712789253313, -2.015893318896274 ], ... ],
    	[ [ -1.0727254285157206, -1.1950394476917758, -1.0349812497802682 ], [ -1.6798116140649269, -0.9049545925950826, -0.8939651729358375 ], [ -2.159159512527157, -0.31151821420671966, -1.8822731637352228 ], [ -0.7898551401078289, -1.791556351793568, -0.969193391551435 ], [ -3.49492916536134, -0.1314097223170192, -2.3774091891549025 ], [ -1.751886663676884, -1.1731740962656563, -0.6593831899326686 ], [ -2.1842914499925308, -0.1661535283966411, -3.2058030599256715 ], [ -2.198147840230015, -1.0107056090146087, -0.644301108831955 ], ... ],
    	[ [ -1.0130606793323778, -0.6559527373313933, -2.1375227287665024 ], [ -0.5514925511910367, -1.1764126321099528, -2.1582455190855727 ], [ -1.9653449754577708, -2.0358638058022662, -0.315636095171437 ], [ -2.755909606263903, -0.8569938839875282, -0.6694057662849189 ], [ -2.317269685271263, -0.7095345987969739, -0.8926116389825038 ], [ -0.748658456015778, -0.7077171913643923, -3.3746086437958813 ], [ -1.1168577163449378, -0.6903016314357382, -1.7645226125557958 ], [ -0.8842750119084266, -0.574506614463417, -3.7295373946032497 ], ... ],
    	...
    ]

Limited-Memory BFGS

Next, we apply the same optimization using L-BFGS, which is nearly ideal for purely second-order or quadratic functions.
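
The history bookkeeping visible in the log below (measurements added, removed, and orientations rejected) supplies curvature pairs to the search-direction computation. Sketched here is the standard two-loop recursion that such a limited-memory method uses, in a generic textbook form; this is not the internals of the LBFGS class:

    import java.util.List;

    // Textbook L-BFGS two-loop recursion: turns the gradient g into a search direction using
    // stored curvature pairs (s_k = x_{k+1} - x_k, y_k = g_{k+1} - g_k), ordered oldest first.
    public class TwoLoopRecursion {
      static double[] direction(double[] g, List<double[]> s, List<double[]> y) {
        int m = s.size();
        double[] q = g.clone();
        double[] alpha = new double[m];
        for (int i = m - 1; i >= 0; i--) {                // first loop: newest to oldest
          double rho = 1.0 / dot(y.get(i), s.get(i));
          alpha[i] = rho * dot(s.get(i), q);
          axpy(-alpha[i], y.get(i), q);
        }
        double gamma = 1.0;                               // initial Hessian scaling H0 = gamma * I
        if (m > 0) {
          gamma = dot(s.get(m - 1), y.get(m - 1)) / dot(y.get(m - 1), y.get(m - 1));
        }
        for (int i = 0; i < q.length; i++) q[i] *= gamma;
        for (int i = 0; i < m; i++) {                     // second loop: oldest to newest
          double rho = 1.0 / dot(y.get(i), s.get(i));
          double beta = rho * dot(y.get(i), q);
          axpy(alpha[i] - beta, s.get(i), q);
        }
        for (int i = 0; i < q.length; i++) q[i] = -q[i];  // descent direction, roughly -H*g
        return q;
      }
      static double dot(double[] a, double[] b) {
        double t = 0; for (int i = 0; i < a.length; i++) t += a[i] * b[i]; return t;
      }
      static void axpy(double a, double[] x, double[] out) {
        for (int i = 0; i < out.length; i++) out[i] += a * x[i];
      }
      public static void main(String[] args) {
        // With no stored pairs the direction reduces to -g.
        System.out.println(java.util.Arrays.toString(
            direction(new double[]{1.0, -2.0}, List.of(), List.of())));
      }
    }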

TrainingTester.java:509 executed in 341.21 seconds (6.010 gc):

    IterativeTrainer iterativeTrainer = new IterativeTrainer(trainable.addRef());
    try {
      iterativeTrainer.setLineSearchFactory(label -> new ArmijoWolfeSearch());
      iterativeTrainer.setOrientation(new LBFGS());
      iterativeTrainer.setMonitor(TrainingTester.getMonitor(history));
      iterativeTrainer.setTimeout(30, TimeUnit.SECONDS);
      iterativeTrainer.setIterationsPerSample(100);
      iterativeTrainer.setMaxIterations(250);
      iterativeTrainer.setTerminateThreshold(0);
      return iterativeTrainer.run();
    } finally {
      iterativeTrainer.freeRef();
    }
Logging
Reset training subject: 8610115526173
Reset training subject: 8610788590171
Adding measurement 31521ab8 to history. Total: 0
LBFGS Accumulation History: 1 points
Constructing line search parameters: GD
Non-optimal measurement 1.9378774646408345 < 1.9378774646408345. Total: 1
th(0)=1.9378774646408345;dx=-3.9972297796401945E-7
Adding measurement 577b9723 to history. Total: 1
New Minimum: 1.9378774646408345 > 1.9378766034639274
WOLFE (weak): th(2.154434690031884)=1.9378766034639274; dx=-3.997228454886349E-7 evalInputDelta=8.611769071187325E-7
Adding measurement 59034758 to history. Total: 2
New Minimum: 1.9378766034639274 > 1.9378757422873054
WOLFE (weak): th(4.308869380063768)=1.9378757422873054; dx=-3.9972271301327653E-7 evalInputDelta=1.7223535291321923E-6
Adding measurement c14833c to history. Total: 3
New Minimum: 1.9378757422873054 > 1.9378722975836713
WOLFE (weak): th(12.926608140191302)=1.9378722975836713; dx=-3.997221831121045E-7 evalInputDelta=5.167057163246724E-6
Adding measurement 2062ce9e to history. Total: 4
New Minimum: 1.9378722975836713 > 1.9378567964738296
WOLFE (weak): th(51.70643256076521)=1.9378567964738296; dx=-3.9971979856200815E-7 evalInputDelta=2.0668167004966875E-5
Adding measurement 7d0967fb to history. Total: 5
New Minimum: 1.9378567964738296 > 1.9377741254497522
WOLFE (weak): th(258.53216280382605)=1.9377741254497522; dx=-3.9970708110459483E-7 evalInputDelta=1.0333919108229672E-4
Adding measurement 1e1e78b9 to history. Total: 6
New Minimum: 1.9377741254497522 > 1.9372574911394032
WOLFE (weak): th(1551.1929768229563)=1.9372574911394032; dx=-3.9962760245678594E-7 evalInputDelta=6.199735014313923E-4
Adding measurement 7762fe14 to history. Total: 7
New Minimum: 1.9372574911394032 > 1.9335407560354732
WOLFE (weak): th(10858.350837760694)=1.9335407560354732; dx=-3.9905563435861937E-7 evalInputDelta=0.004336708605361395
Adding measurement 44d7a510 to history. Total: 8
New Minimum: 1.9335407560354732 > 1.903386183082749
WOLFE (weak): th(86866.80670208555)=1.903386183082749; dx=-3.944029614306556E-7 evalInputDelta=0.03449128155808556
Adding measurement 5d9dfab2 to history. Total: 9
New Minimum: 1.903386183082749 > 1.643697041217867
END: th(781801.26031877)=1.643697041217867; dx=-3.5347174025816194E-7 evalInputDelta=0.29418042342296746
Fitness changed from 1.9378774646408345 to 1.643697041217867
Iteration 1 complete. Error: 1.643697041217867 Total: 21.9898; Orientation: 1.1334; Line Search: 18.8632
Non-optimal measurement 1.643697041217867 < 1.643697041217867. Total: 10
Rejected: LBFGS Orientation magnitude: 4.633e+03, gradient 5.601e-04, dot -0.960; [22f65f95-82cd-48b7-85a0-f93ebd319cfe = 1.000/1.000e+00, e08bad6b-585c-450e-b4cb-bb4c1883fe5e = 1.000/1.000e+00, 44043ecc-3e08-4802-ae59-3688c15e01bb = 1.000/1.000e+00, 1a04f22b-5b69-4dd6-9310-9047bab1bbe3 = 1.000/1.000e+00, b66ee0fa-5be0-4fdb-a3f0-c1dc12a46105 = 1.000/1.000e+00]
Orientation rejected. Popping history element from 1.643697041217867, 1.903386183082749, 1.9335407560354732, 1.9372574911394032, 1.9377741254497522, 1.9378567964738296, 1.9378722975836713, 1.9378757422873054, 1.9378766034639274, 1.9378774646408345
Rejected: LBFGS Orientation magnitude: 4.633e+03, gradient 5.601e-04, dot -0.960; [1a04f22b-5b69-4dd6-9310-9047bab1bbe3 = 1.000/1.000e+00, 22f65f95-82cd-48b7-85a0-f93ebd319cfe = 1.000/1.000e+00, b66ee0fa-5be0-4fdb-a3f0-c1dc12a46105 = 1.000/1.000e+00, 44043ecc-3e08-4802-ae59-3688c15e01bb = 1.000/1.000e+00, e08bad6b-585c-450e-b4cb-bb4c1883fe5e = 1.000/1.000e+00]
Orientation rejected. Popping history element from 1.643697041217867, 1.903386183082749, 1.9335407560354732, 1.9372574911394032, 1.9377741254497522, 1.9378567964738296, 1.9378722975836713, 1.9378757422873054, 1.9378766034639274
Rejected: LBFGS Orientation magnitude: 4.633e+03, gradient 5.601e-04, dot -0.960; [b66ee0fa-5be0-4fdb-a3f0-c1dc12a46105 = 1.000/1.000e+00, 22f65f95-82cd-48b7-85a0-f93ebd319cfe = 1.000/1.000e+00, e08bad6b-585c-450e-b4cb-bb4c1883fe5e = 1.000/1.000e+00, 44043ecc-3e08-4802-ae59-3688c15e01bb = 1.000/1.000e+00, 1a04f22b-5b69-4dd6-9310-9047bab1bbe3 = 1.000/1.000e+00]
Orientation rejected. Popping history element from 1.643697041217867, 1.903386183082749, 1.9335407560354732, 1.9372574911394032, 1.9377741254497522, 1.9378567964738296, 1.9378722975836713, 1.9378757422873054
Rejected: LBFGS Orientation magnitude: 4.633e+03, gradient 5.601e-04, dot -0.960; [b66ee0fa-5be0-4fdb-a3f0-c1dc12a46105 = 1.000/1.000e+00, 44043ecc-3e08-4802-ae59-3688c15e01bb = 1.000/1.000e+00, 22f65f95-82cd-48b7-85a0-f93ebd319cfe = 1.000/1.000e+00, e08bad6b-585c-450e-b4cb-bb4c1883fe5e = 1.000/1.000e+00, 1a04f22b-5b69-4dd6-9310-9047bab1bbe3 = 1.000/1.000e+00]
Orientation rejected. Popping history element from 1.643697041217867, 1.903386183082749, 1.9335407560354732, 1.9372574911394032, 1.9377741254497522, 1.9378567964738296, 1.9378722975836713
Rejected: LBFGS Orientation magnitude: 4.633e+03, gradient 5.601e-04, dot -0.960; [e08bad6b-585c-450e-b4cb-bb4c1883fe5e = 1.000/1.000e+00, b66ee0fa-5be0-4fdb-a3f0-c1dc12a46105 = 1.000/1.000e+00, 1a04f22b-5b69-4dd6-9310-9047bab1bbe3 = 1.000/1.000e+00, 22f65f95-82cd-48b7-85a0-f93ebd319cfe = 1.000/1.000e+00, 44043ecc-3e08-4802-ae59-3688c15e01bb = 1.000/1.000e+00]
Orientation rejected. Popping history element from 1.643697041217867, 1.903386183082749, 1.9335407560354732, 1.9372574911394032, 1.9377741254497522, 1.9378567964738296
Rejected: LBFGS Orientation magnitude: 4.634e+03, gradient 5.601e-04, dot -0.960; [b66ee0fa-5be0-4fdb-a3f0-c1dc12a46105 = 1.000/1.000e+00, e08bad6b-585c-450e-b4cb-bb4c1883fe5e = 1.000/1.000e+00, 44043ecc-3e08-4802-ae59-3688c15e01bb = 1.000/1.000e+00, 22f65f95-82cd-48b7-85a0-f93ebd319cfe = 1.000/1.000e+00, 1a04f22b-5b69-4dd6-9310-9047bab1bbe3 = 1.000/1.000e+00]
Orientation rejected. Popping history element from 1.643697041217867, 1.903386183082749, 1.9335407560354732, 1.9372574911394032, 1.9377741254497522
Rejected: LBFGS Orientation magnitude: 4.636e+03, gradient 5.601e-04, dot -0.960; [22f65f95-82cd-48b7-85a0-f93ebd319cfe = 1.000/1.000e+00, b66ee0fa-5be0-4fdb-a3f0-c1dc12a46105 = 1.000/1.000e+00, e08bad6b-585c-450e-b4cb-bb4c1883fe5e = 1.000/1.000e+00, 1a04f22b-5b69-4dd6-9310-9047bab1bbe3 = 1.000/1.000e+00, 44043ecc-3e08-4802-ae59-3688c15e01bb = 1.000/1.000e+00]
Orientation rejected. Popping history element from 1.643697041217867, 1.903386183082749, 1.9335407560354732, 1.9372574911394032
LBFGS Accumulation History: 3 points
Removed measurement 5d9dfab2 to history. Total: 9
Removed measurement 44d7a510 to history. Total: 8
Removed measurement 7762fe14 to history. Total: 7
Removed measurement 1e1e78b9 to history. Total: 6
Removed measurement 7d0967fb to history. Total: 5
Removed measurement 2062ce9e to history. Total: 4
Removed measurement c14833c to history. Total: 3
Adding measurement 3a382ae6 to history. Total: 3
th(0)=1.643697041217867;dx=-3.137109421927425E-7
Adding measurement 7bc2c93a to history. Total: 4
New Minimum: 1.643697041217867 > 1.1715536994054105
END: th(1684339.7559414052)=1.1715536994054105; dx=-2.489960547589457E-7 evalInputDelta=0.47214334181245654
Fitness changed from 1.643697041217867 to 1.1715536994054105
Iteration 2 complete. Error: 1.1715536994054105 Total: 319.2168; Orientation: 315.1127; Line Search: 3.4295
Final threshold in iteration 2: 1.1715536994054105 (> 0.0) after 341.207s (< 30.000s)

Returns

    1.1715536994054105

This training run resulted in the following configuration:

TrainingTester.java:610 executed in 0.00 seconds (0.000 gc):

    RefList<double[]> state = network.state();
    assert state != null;
    String description = state.stream().map(RefArrays::toString).reduce((a, b) -> a + "\n" + b)
        .orElse("");
    state.freeRef();
    return description;

Returns

    

And the regressed input:

TrainingTester.java:622 executed in 0.00 seconds (0.000 gc):

    return RefArrays.stream(RefUtil.addRef(data)).flatMap(x -> {
      return RefArrays.stream(x);
    }).limit(1).map(x -> {
      String temp_18_0015 = x.prettyPrint();
      x.freeRef();
      return temp_18_0015;
    }).reduce((a, b) -> a + "\n" + b).orElse("");

Returns

    [
    	[ [ -0.6828081336847814, -1.1945248929836494, -0.19466697333156918 ], [ 1.0674304635903553, 1.6219997252001204, -0.9694301887904756 ], [ -0.7853911801822179, -0.24232411948645852, -0.4682847003313237 ], [ -1.3441135990579913, 0.004049390560075916, -1.6239357915020847 ], [ -1.6081648258075254, 0.36243176290218015, 0.06573306290534539 ], [ 0.1360379109824055, -0.15312158061273007, -0.8829163303696753 ], [ -0.7092974977868174, -0.844718695985819, -1.4579838062273638 ], [ 1.442696228777407, 0.6410725429045867, 0.6402312283180066 ], ... ],
    	[ [ 1.0471157252096648, 0.4109099772588487, -1.2260257024685135 ], [ -0.5100597622909322, -0.32689116830834497, -0.015049069400722911 ], [ 1.3639112912369742, 1.675465463137368, 0.8166232456256581 ], [ -0.014578566115000968, 0.8311962774681745, -1.3726177113531735 ], [ 1.3748646053943405, 1.3053710369106333, -0.9802356423049742 ], [ 1.4312956316995213, 1.2445811549405041, 0.6761232133599744 ], [ 1.1004259541724413, 0.528671901451442, -0.16509785562388318 ], [ -0.8190635118605734, -0.8413148190985503, -1.5116216690408766 ], ... ],
    	[ [ -0.557501201222657, 0.9153565690325208, 1.0741446321901362 ], [ 0.21366317193611242, -1.657103062388039, -0.4205601095480733 ], [ -0.7390006596294707, -0.41875549953061364, 0.5657561591600843 ], [ -0.6229963527903339, -0.6029546976699254, 0.3499510504602592 ], [ -0.032610324514615, 0.9241596674188485, 1.0204506570957665 ], [ -0.5231571843928298, 0.5968136893357718, -1.3736565049429421 ], [ -1.4130722241064506, -0.048299152391856844, 0.16537137649830727 ], [ 1.0473679408466574, 1.310030005046233, -0.5013979458928902 ], ... ],
    	[ [ 0.8877072959587557, 0.1765713625726667, -0.9602786585314226 ], [ 1.4181420125787516, -0.5429045747521465, 1.896762562173395 ], [ -1.136603739233572, 1.5234726966013141, 0.6291310426322578 ], [ -0.6127002085016346, 1.6721161713934174, 1.8885840371082172 ], [ 0.8862147059249814, -0.35901464177468273, 0.7007999358497015 ], [ -0.6528778464216725, 0.8388472429554625, -1.20196939653379 ], [ 0.6688196723000488, -1.0777756367421265, -0.8030440355579221 ], [ -1.5103891330700954, -0.18724835940088255, -1.1663625075290218 ], ... ],
    	[ [ -0.012332090330015588, 1.337712789935979, 0.9786193003940367 ], [ 0.08382184582825183, -0.6561137786009328, 0.5922919327726809 ], [ 1.0004159452786308, -0.24617871286768808, -0.9142372324109425 ], [ -0.1336738791119307, 1.2402437241793425, 0.2894301549325881 ], [ -0.7341689400193481, -0.3073768401701652, -1.1104542198104865 ], [ -1.423512206677743, -1.2959847261431292, -0.9805030671791277 ], [ 0.6463573795656523, -1.5459340398518877, 0.9115766602862354 ], [ 0.3954351043042579, -0.9648779624165285, -0.06255714188772946 ], ... ],
    	[ [ -0.5326941206584996, -0.30792979282841915, 1.0526239134869186 ], [ -0.5272848455077297, 0.7069082584794417, 0.02437658702828796 ], [ -0.1103563740229449, 1.0260236674137715, -0.13166729339082656 ], [ -1.3221456273107843, -0.35915540101442134, -0.8226989716747943 ], [ 1.4091347484016328, -0.4253485781010782, -0.31578617030055467 ], [ 1.003845179090799, -0.5736136699999437, 0.9177684909091446 ], [ 0.8736285579274305, -0.8500047739330558, 0.6163762160056253 ], [ -1.870115655684256, -0.9878330038818467, -0.5180513404338971 ], ... ],
    	[ [ -0.23208096925191146, 1.6697699014670933, 0.9463110677848181 ], [ 1.0688714147143747, 1.5029695583629923, 0.7401590269226331 ], [ 0.6316829138455305, 1.2205932422061707, -0.36027615605170116 ], [ 1.523743575988433, -0.11621311921800628, 0.48046954322957325 ], [ -1.4726979192002279, 0.9256047987160175, 0.7510931204842104 ], [ -0.8054514824011086, -1.608845665843007, -1.5057028517558844 ], [ 0.12715945269507817, 2.128239792853311, 0.5086007544516106 ], [ -0.7470699289812793, 0.6121566942117794, 1.4709132347695 ], ... ],
    	[ [ -0.7629150417793068, -0.8416361382639415, -1.3274488199567516 ], [ 0.5832333682964151, 0.8930121692203581, 0.03975446248322666 ], [ -0.3588925242626068, 0.31530819146950906, 0.9995843327930978 ], [ 1.3108753258198798, -1.0592017632857709, 0.8483264374658913 ], [ 1.2285017435626513, 0.15965132648609393, -0.8081530700487451 ], [ 0.10594968272036492, -1.1083095913743235, 1.1263599086539586 ], [ 0.5996584502911019, 1.8267249759894542, 0.5616165737194438 ], [ 0.364629906092407, -0.8364364463076998, -1.6801934597847072 ], ... ],
    	...
    ]

To produce the following output:

TrainingTester.java:633 executed in 0.08 seconds (0.000 gc):

    Result[] array = ConstantResult.batchResultArray(pop(RefUtil.addRef(data)));
    @Nullable
    Result eval = layer.eval(array);
    assert eval != null;
    TensorList tensorList = Result.getData(eval);
    String temp_18_0016 = tensorList.stream().limit(1).map(x -> {
      String temp_18_0017 = x.prettyPrint();
      x.freeRef();
      return temp_18_0017;
    }).reduce((a, b) -> a + "\n" + b).orElse("");
    tensorList.freeRef();
    return temp_18_0016;

Returns

    [
    	[ [ -1.1720951834327438, -1.6838119427316118, -0.6839540230795316 ], [ -1.0548792497477466, -0.5003099881379816, -3.0917399021285776 ], [ -1.4096267357329935, -0.866559675037234, -1.0925202558820992 ], [ -1.7238845515401946, -0.3757215619221274, -2.003706743984288 ], [ -2.6032724898913786, -0.6326759011816729, -0.9293746011785077 ], [ -0.7466239579864282, -1.0357834495815639, -1.765578199338509 ], [ -0.8528547434536922, -0.9882759416526937, -1.6015410518942386 ], [ -0.6401802332420021, -1.4418039191148224, -1.4426452337014024 ], ... ],
    	[ [ -0.48998090203228295, -1.126186649983099, -2.7631223297104612 ], [ -1.3458714161483343, -1.1627028221657472, -0.8508607232581251 ], [ -1.079790380718585, -0.7682362088181911, -1.627078426329901 ], [ -1.2773013887019293, -0.431526545118754, -2.635340533940102 ], [ -0.706926866729856, -0.7764204352135633, -3.0620271144291706 ], [ -0.8327396211481692, -1.0194540979071864, -1.5879120394877162 ], [ -0.6133601045606272, -1.1851141572816264, -1.8788839143569516 ], [ -0.9075684130202821, -0.9298197202582591, -1.6001265702005854 ], ... ],
    	[ [ -2.348892539203549, -0.8760347689483713, -0.7172467057907559 ], [ -0.5213814372927584, -2.3921476716169097, -1.155604718776944 ], [ -1.8024121686335488, -1.4821670085346916, -0.49765534984399384 ], [ -1.5402967840374835, -1.520255128917075, -0.5673493807868903 ], [ -1.8671275785458739, -0.9103575866124104, -0.8140665969354923 ], [ -1.5022905802545319, -0.3823197065259303, -2.3527899008046442 ], [ -2.2785218825613036, -0.91374881084671, -0.7000782819565459 ], [ -0.9214363786432467, -0.6587743144436711, -2.4702022653827944 ], ... ],
    	[ [ -0.499950831609393, -1.211086764995482, -2.3479367860995715 ], [ -1.0132568932373596, -2.9743034805682576, -0.5346363436427162 ], [ -3.0513206160420223, -0.39124418020713647, -1.2855858341761928 ], [ -3.136451649691802, -0.85163526979675, -0.6351674040819502 ], [ -0.7507717845906576, -1.9960011322903217, -0.9361865546659376 ], [ -1.7954577371825586, -0.30373264780542364, -2.3445492872946763 ], [ -0.33922841250008173, -2.085823721542257, -1.8110921203580528 ], [ -1.8190200488791997, -0.49587927520998687, -1.474993423338126 ], ... ],
    	[ [ -2.0217322830326205, -0.6716874027666258, -1.0307808923085682 ], [ -1.1441877748229694, -1.884123399252154, -0.6357176878785403 ], [ -0.36107777197965585, -1.6076724301259746, -2.275730949669229 ], [ -1.8683332737729863, -0.4944156704817131, -1.4452292397284674 ], [ -1.1689901474321525, -0.7421980475829696, -1.545275427223291 ], [ -1.3065481616492698, -1.1790206811146557, -0.8635390221506544 ], [ -0.8818555558043746, -3.0741469752219146, -0.6166362750837915 ], [ -0.6361178975671022, -1.9964309642878886, -1.0941101437590897 ], ... ],
    	[ [ -1.9647138209674717, -1.7399494931373911, -0.37939578682205344 ], [ -1.8199808597606686, -0.5857877557734972, -1.268319427224651 ], [ -1.6281388760757487, -0.49175883463903225, -1.6494497954436302 ], [ -1.6615230113231627, -0.6985327850267996, -1.1620763556871725 ], [ -0.2910886138171127, -2.1255719403198237, -2.0160095325193 ], [ -0.753311952143161, -2.3307708012339035, -0.8393886403248154 ], [ -0.6686443259703054, -2.3922776578307916, -0.9258966678921107 ], [ -1.985378899366831, -1.103096247564422, -0.6333145841164723 ], ... ],
    	[ [ -2.3931043713011335, -0.4912535005821288, -1.214712334264404 ], [ -1.1827758407384679, -0.7486776970898503, -1.5114882285302094 ], [ -1.1546375464392717, -0.5657272180786315, -2.1465966163365033 ], [ -0.4358570873139269, -2.0758137825203664, -1.4791311200727866 ], [ -3.056205602964047, -0.6579028850478014, -0.8324145632796085 ], [ -0.6648850102205502, -1.4682791936624486, -1.365136379575326 ], [ -2.2886318789802154, -0.28755153882198226, -1.907190577223683 ], [ -2.6448946266690783, -1.2856680034760193, -0.42691146291829885 ], ... ],
    	[ [ -0.9134558649579925, -0.9921769614426271, -1.4779896431354373 ], [ -1.0797175716648715, -0.7699387707409284, -1.6231964774780598 ], [ -1.9246466959915312, -1.2504459802594152, -0.5661698389358265 ], [ -0.5441540942691303, -2.914231183374781, -1.0067029826231186 ], [ -0.38788987817948395, -1.4567402952560413, -2.4245446917908806 ], [ -1.4039531504333147, -2.618212424528003, -0.38354292449972105 ], [ -1.6815503253368598, -0.4544837996385076, -1.719592201908518 ], [ -0.35786760664821693, -1.5589339590483235, -2.402690972525331 ], ... ],
    	...
    ]

TrainingTester.java:432 executed in 0.16 seconds (0.000 gc):

    return TestUtil.compare(title + " vs Iteration", runs);
Logging
Plotting range=[1.0, -1.2555785765005154], [4.0, 0.2158217733913315]; valueStats=DoubleSummaryStatistics{count=7, sum=6.361106, min=0.055516, average=0.908729, max=1.643697}
Plotting 4 points for GD
Only 1 points for CjGD
Plotting 2 points for LBFGS

Returns

Result

TrainingTester.java:435 executed in 0.01 seconds (0.000 gc):

    return TestUtil.compareTime(title + " vs Time", runs);
Logging
Plotting range=[0.0, -1.2555785765005154], [319.217, 0.2158217733913315]; valueStats=DoubleSummaryStatistics{count=7, sum=6.361106, min=0.055516, average=0.908729, max=1.643697}
Plotting 4 points for GD
Only 1 points for CjGD
Plotting 2 points for LBFGS

Returns

Result

Results

TrainingTester.java:255 executed in 0.00 seconds (0.000 gc):

    return grid(inputLearning, modelLearning, completeLearning);

Returns

Result

TrainingTester.java:258 executed in 0.00 seconds (0.000 gc):

    return new ComponentResult(null == inputLearning ? null : inputLearning.value,
        null == modelLearning ? null : modelLearning.value, null == completeLearning ? null : completeLearning.value);

Returns

    {"input":{ "LBFGS": { "type": "NonConverged", "value": 1.1715536994054105 }, "CjGD": { "type": "NonConverged", "value": 0.11156816461988146 }, "GD": { "type": "NonConverged", "value": 0.055516416269176506 } }, "model":null, "complete":null}

LayerTests.java:425 executed in 0.00 seconds (0.000 gc):

    throwException(exceptions.addRef());

Results

details: {"input":{ "LBFGS": { "type": "NonConverged", "value": 1.1715536994054105 }, "CjGD": { "type": "NonConverged", "value": 0.11156816461988146 }, "GD": { "type": "NonConverged", "value": 0.055516416269176506 } }, "model":null, "complete":null}
result: OK
  {
    "result": "OK",
    "performance": {
      "execution_time": "425.118",
      "gc_time": "11.057"
    },
    "created_on": 1586743158880,
    "file_name": "trainingTest",
    "report": {
      "simpleName": "PixelLog",
      "canonicalName": "com.simiacryptus.mindseye.layers.cudnn.SoftmaxLayerTest.PixelLog",
      "link": "https://github.com/SimiaCryptus/mindseye-cudnn/tree/59d5b3318556370acb2d83ee6ec123ce0fc6974f/src/test/java/com/simiacryptus/mindseye/layers/cudnn/SoftmaxLayerTest.java",
      "javaDoc": ""
    },
    "training_analysis": {
      "input": {
        "LBFGS": {
          "type": "NonConverged",
          "value": 1.1715536994054105
        },
        "CjGD": {
          "type": "NonConverged",
          "value": 0.11156816461988146
        },
        "GD": {
          "type": "NonConverged",
          "value": 0.055516416269176506
        }
      }
    },
    "archive": "s3://code.simiacrypt.us/tests/com/simiacryptus/mindseye/layers/cudnn/SoftmaxActivationLayer/PixelLog/trainingTest/202004135918",
    "id": "aa3b29fb-8456-4362-acb5-ed2d2a380b30",
    "report_type": "Components",
    "display_name": "Comparative Training",
    "target": {
      "simpleName": "SoftmaxActivationLayer",
      "canonicalName": "com.simiacryptus.mindseye.layers.cudnn.SoftmaxActivationLayer",
      "link": "https://github.com/SimiaCryptus/mindseye-cudnn/tree/59d5b3318556370acb2d83ee6ec123ce0fc6974f/src/main/java/com/simiacryptus/mindseye/layers/cudnn/SoftmaxActivationLayer.java",
      "javaDoc": ""
    }
  }