1. Test Modules
  2. Training Characteristics
    1. Input Learning
      1. Gradient Descent
      2. Conjugate Gradient Descent
      3. Limited-Memory BFGS
    2. Results
  3. Results

Subreport: Logs for com.simiacryptus.ref.lang.ReferenceCountingBase

Test Modules

Using Seed 9201533621236934656

Training Characteristics

Input Learning

In this test, we use a network to learn this target input, given its pre-evaluated output:

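As a rough illustration of what "input learning" means here, the layer is held fixed and only the input is optimized until the layer reproduces a pre-recorded output. The following is a minimal, self-contained sketch in plain Java, not the TrainingTester API; the toy layer y = 2x, the learning rate, and the class name are assumptions for illustration only.

    // Minimal sketch of "input learning": the layer is fixed and only the input
    // is adjusted by gradient descent until it reproduces a target output.
    // The toy layer y = 2*x stands in for the real network under test.
    public class InputLearningSketch {
      public static void main(String[] args) {
        double[] target = {1.0, -0.5, 2.0}; // pre-evaluated output to match
        double[] input = {0.0, 0.0, 0.0};   // free variables being optimized
        double lr = 0.1;
        for (int iter = 0; iter < 100; iter++) {
          double loss = 0;
          for (int i = 0; i < input.length; i++) {
            double err = 2.0 * input[i] - target[i];   // forward pass and residual
            loss += err * err / input.length;          // mean squared error
            input[i] -= lr * 4.0 * err / input.length; // dLoss/dInput step
          }
          if (loss < 1e-12) break;
        }
        // Converges to roughly [0.5, -0.25, 1.0], the input whose output is the target.
        System.out.println(java.util.Arrays.toString(input));
      }
    }
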
TrainingTester.java:332 executed in 0.02 seconds (0.000 gc):

    return RefArrays.stream(RefUtil.addRef(input_target)).flatMap(RefArrays::stream).map(x -> {
      try {
        return x.prettyPrint();
      } finally {
        x.freeRef();
      }
    }).reduce((a, b) -> a + "\n" + b).orElse("");

Returns

    [
    	[ [ -1.984, 1.576, -1.3 ], [ 1.068, -1.092, 0.552 ], [ 1.96, 0.688, 0.092 ], [ -0.676, 1.708, 1.396 ], [ -0.612, 0.604, -0.504 ], [ 0.528, 1.292, 1.624 ], [ 1.984, -1.14, -1.004 ], [ 1.516, -0.2, 1.84 ], ... ],
    	[ [ -1.948, -1.036, -1.564 ], [ -0.204, -1.588, -0.996 ], [ -0.592, -1.272, -1.904 ], [ 1.972, -0.868, -1.092 ], [ -0.54, 1.072, 0.644 ], [ 0.152, 0.328, -1.728 ], [ -1.988, -1.928, -1.212 ], [ 0.436, -0.176, 0.192 ], ... ],
    	[ [ 1.388, -1.52, -0.936 ], [ -0.008, -1.212, 0.7 ], [ 1.56, -1.568, 0.504 ], [ 0.468, -1.396, 1.348 ], [ -1.244, -0.848, -0.104 ], [ 1.504, -1.272, -1.064 ], [ -1.636, -1.088, -0.584 ], [ 0.88, 1.696, -0.14 ], ... ],
    	[ [ -1.24, -1.172, 0.128 ], [ 0.06, -0.444, 1.116 ], [ -0.168, -1.644, -0.712 ], [ 0.832, -0.88, -0.736 ], [ -0.296, -1.044, -1.404 ], [ 0.244, -1.568, -0.408 ], [ 1.372, 0.996, 0.396 ], [ 1.236, -1.372, -0.304 ], ... ],
    	[ [ 1.344, -1.564, -1.284 ], [ -0.172, 1.976, -1.716 ], [ -0.124, 0.1, -0.912 ], [ -0.348, 1.376, 0.64 ], [ -0.208, -0.924, -0.304 ], [ 0.008, -1.384, 0.104 ], [ 0.924, -0.8, -1.876 ], [ 0.304, 1.892, -0.328 ], ... ],
    	[ [ 1.496, 1.44, -0.192 ], [ -1.6, 0.42, -1.06 ], [ -1.28, 0.164, -0.4 ], [ -1.576, -1.988, -0.908 ], [ -1.656, 0.924, 1.012 ], [ -1.384, -1.48, -1.648 ], [ -0.14, -0.12, 1.704 ], [ 1.736, 1.668, -0.64 ], ... ],
    	[ [ 1.256, 0.836, 0.844 ], [ 0.172, 1.04, -1.884 ], [ -0.428, 1.348, 1.068 ], [ -1.236, -0.236, 0.612 ], [ 0.016, 0.596, -0.82 ], [ 0.976, -1.276, -1.812 ], [ 1.364, -0.66, 0.984 ], [ 1.436, -0.476, -0.76 ], ... ],
    	[ [ 1.136, 0.612, -0.652 ], [ -0.636, 1.4, -0.872 ], [ 0.876, -1.096, -1.216 ], [ 1.384, 0.388, 1.384 ], [ 0.492, 1.96, -0.988 ], [ 1.94, -0.176, -1.356 ], [ -0.484, -1.836, -1.276 ], [ 0.104, -1.94, 1.52 ], ... ],
    	...
    ]
    [
    	[ [ -0.668, 0.076, 1.452 ], [ 0.14, 0.768, -0.552 ], [ -0.976, 1.52, 0.644 ], [ -0.188, 0.408, -0.724 ], [ 0.152, -0.024, -0.092 ], [ -0.656, 0.9, -1.696 ], [ 0.536, -1.028, -1.584 ], [ 0.436, 0.92, 1.768 ], ... ],
    	[ [ -0.2, -1.196, -0.564 ], [ -1.02, -0.504, 1.856 ], [ 1.376, 0.2, 0.868 ], [ -1.78, 1.992, 1.296 ], [ 0.476, -1.68, -0.568 ], [ -1.86, 0.24, 0.66 ], [ -0.716, 0.904, 0.036 ], [ -1.288, 1.64, -0.144 ], ... ],
    	[ [ -0.108, -1.984, -0.1 ], [ -0.596, 1.456, -0.628 ], [ 1.088, -1.784, 0.556 ], [ -1.644, -1.364, 0.108 ], [ 0.452, 0.464, 0.968 ], [ -0.244, 1.284, -0.556 ], [ 1.728, -0.648, -0.684 ], [ 0.164, 0.612, 1.404 ], ... ],
    	[ [ 0.908, -1.032, 1.036 ], [ 0.304, 0.856, 0.74 ], [ -0.432, -0.596, 1.264 ], [ -1.38, -1.424, 0.208 ], [ -0.572, -0.78, -1.784 ], [ -1.6, -0.128, 0.14 ], [ 0.216, 1.808, -1.344 ], [ -1.332, 0.972, 0.472 ], ... ],
    	[ [ -0.9, -0.136, -0.76 ], [ -0.604, 0.292, 0.668 ], [ -1.432, -1.252, -1.968 ], [ -1.572, 1.528, -0.404 ], [ 0.024, 1.0, -1.34 ], [ -0.308, -1.252, 1.992 ], [ 1.552, -1.764, 1.728 ], [ -0.972, -0.916, 1.356 ], ... ],
    	[ [ -1.516, -0.336, 1.096 ], [ -1.088, 0.38, 1.624 ], [ -1.816, 1.612, 1.284 ], [ -1.18, -1.484, -1.224 ], [ 1.396, -1.996, 1.62 ], [ -1.308, 0.064, 0.372 ], [ 0.508, -1.512, -0.252 ], [ -0.352, -0.704, -0.776 ], ... ],
    	[ [ 1.168, 0.088, 0.32 ], [ 1.16, -0.876, 1.128 ], [ 0.236, 1.264, -0.704 ], [ -1.952, 0.436, 1.552 ], [ 1.996, 0.884, 0.304 ], [ 0.556, -1.884, 1.34 ], [ 1.128, -1.52, -0.96 ], [ -0.308, 1.508, -0.792 ], ... ],
    	[ [ 0.828, 1.7, -0.752 ], [ 1.084, 0.04, -0.048 ], [ -0.712, 1.672, 1.88 ], [ 0.236, 1.34, 1.3 ], [ 0.62, 0.988, -0.456 ], [ 1.444, 0.328, -0.052 ], [ -1.752, -1.724, -0.844 ], [ -0.784, -1.86, 0.108 ], ... ],
    	...
    ]
    [
    	[ [ 0.02, -1.208, 1.512 ], [ 1.912, -0.172, 0.648 ], [ -1.532, -1.1, -1.004 ], [ 1.3, 0.864, -1.788 ], [ 0.644, 1.596, -0.112 ], [ -1.756, -0.184, 1.472 ], [ 0.66, -0.28, 0.164 ], [ -0.06, 1.384, 1.88 ], ... ],
    	[ [ 1.964, -0.076, -0.36 ], [ 1.764, -1.392, -0.54 ], [ 0.588, -0.104, 0.716 ], [ -1.54, 0.136, -1.272 ], [ -0.78, -1.2, 0.26 ], [ 0.832, -0.348, -0.828 ], [ 1.68, 1.676, 1.012 ], [ -0.512, -1.892, -1.716 ], ... ],
    	[ [ -1.812, -1.656, -0.036 ], [ 1.3, -1.04, -1.628 ], [ -1.604, 0.856, 1.8 ], [ 1.26, 0.696, -0.696 ], [ 1.888, -0.22, -0.86 ], [ -1.372, 0.504, 0.4 ], [ 1.128, 1.136, -0.16 ], [ 0.66, 1.22, -1.74 ], ... ],
    	[ [ -0.904, 1.892, -0.948 ], [ -0.556, 0.404, -1.04 ], [ 1.604, -0.968, 0.664 ], [ 1.868, 1.992, 1.144 ], [ 1.816, 0.896, -0.852 ], [ -1.004, 1.388, 0.932 ], [ -1.196, -1.716, 1.476 ], [ -1.356, -0.688, -0.236 ], ... ],
    	[ [ -0.624, -1.584, -1.876 ], [ 1.416, -1.176, -0.044 ], [ 0.704, -1.34, 1.924 ], [ 0.78, -1.344, 0.34 ], [ -1.032, -1.812, -1.46 ], [ -0.028, -0.188, 0.196 ], [ 0.224, -1.968, 0.424 ], [ 0.06, -0.18, -1.508 ], ... ],
    	[ [ -0.776, -1.42, 1.652 ], [ -0.348, 1.188, 0.244 ], [ -0.228, -0.928, 1.332 ], [ 1.28, 1.468, 1.064 ], [ -0.372, -1.132, -0.936 ], [ 1.46, 0.964, 0.188 ], [ 1.464, 0.896, -0.5 ], [ 1.856, 0.404, -1.756 ], ... ],
    	[ [ 1.0, 1.076, -1.932 ], [ 1.652, 1.296, -1.024 ], [ -0.472, 0.984, -0.36 ], [ -1.504, -1.264, -1.504 ], [ -0.984, -0.504, -1.36 ], [ 0.88, 1.808, -1.128 ], [ -1.396, -1.776, 1.312 ], [ 0.504, -0.156, -1.756 ], ... ],
    	[ [ -1.228, -0.76, 1.812 ], [ -1.464, 0.236, 0.24 ], [ -1.208, 0.2, -1.996 ], [ -1.76, 1.608, 1.364 ], [ 0.844, 1.56, 1.056 ], [ 1.88, -1.184, 0.056 ], [ -0.208, -0.928, 0.088 ], [ -0.128, 0.348, 0.832 ], ... ],
    	...
    ]
    [
    	[ [ -1.668, 1.996, 0.044 ], [ -1.264, -1.772, 1.164 ], [ -1.332, 1.412, -1.188 ], [ 0.404, 1.016, 0.82 ], [ -0.636, 1.872, 1.504 ], [ 0.788, 0.86, 0.948 ], [ -1.628, -1.136, -1.812 ], [ 0.484, 1.176, 0.364 ], ... ],
    	[ [ 0.12, -1.084, -0.764 ], [ -1.128, 0.332, 0.58 ], [ -1.112, 1.58, -1.036 ], [ 1.424, 1.892, 1.292 ], [ 0.904, 0.448, 0.904 ], [ 0.168, 0.82, -0.768 ], [ 1.716, 1.988, -1.944 ], [ -1.908, 1.304, 0.748 ], ... ],
    	[ [ -1.876, 0.056, -0.224 ], [ 0.372, -1.964, -0.656 ], [ 0.34, 1.236, 0.76 ], [ 1.516, -1.34, -1.284 ], [ -1.22, 1.836, -1.924 ], [ 0.964, 1.448, -0.472 ], [ 0.124, 1.668, 0.484 ], [ -1.888, 0.32, -0.024 ], ... ],
    	[ [ -1.308, -0.744, -0.8 ], [ -1.048, 1.692, -0.516 ], [ 1.644, 1.868, -0.148 ], [ 1.18, -0.08, 1.844 ], [ 0.908, -1.28, -0.056 ], [ -1.624, -1.908, -1.752 ], [ 1.184, 1.8, -1.24 ], [ -1.684, 1.704, 1.188 ], ... ],
    	[ [ -0.98, 0.892, -0.408 ], [ -1.32, -1.848, 1.496 ], [ 0.328, 0.796, -1.952 ], [ -0.008, 0.192, -1.596 ], [ 1.524, -0.236, 0.588 ], [ 1.604, -1.184, -0.084 ], [ -0.156, 1.924, -0.932 ], [ 0.712, -0.104, -0.684 ], ... ],
    	[ [ 1.86, 0.812, 1.14 ], [ -1.308, 1.296, 1.276 ], [ -0.188, 1.392, -1.564 ], [ -0.296, -1.048, -0.084 ], [ -0.3, 1.752, -0.116 ], [ 1.196, 0.016, -0.58 ], [ 0.256, 1.272, -1.02 ], [ -0.712, 1.816, 0.08 ], ... ],
    	[ [ 0.832, 1.54, -0.724 ], [ -1.516, -0.34, 0.968 ], [ -0.2, 1.984, -0.324 ], [ -0.8, -1.592, 0.728 ], [ -1.076, 1.444, -0.568 ], [ -0.356, 0.532, -0.74 ], [ -0.304, -0.028, 1.196 ], [ -1.576, 1.864, 0.98 ], ... ],
    	[ [ -0.328, -0.392, -1.572 ], [ 0.884, 0.548, -1.372 ], [ 1.912, -0.716, -0.66 ], [ 0.22, 1.78, -0.82 ], [ 0.836, 0.268, 0.788 ], [ -0.696, 1.724, 1.984 ], [ -1.752, -0.096, -1.388 ], [ -1.272, 0.048, -0.72 ], ... ],
    	...
    ]
    [
    	[ [ 0.208, 0.22, -1.676 ], [ -1.924, -0.412, 1.404 ], [ 1.3, -0.352, -1.872 ], [ -0.892, 1.04, 1.248 ], [ -0.44, 1.232, -0.256 ], [ 1.788, -1.572, 1.012 ], [ 0.536, 0.652, -0.348 ], [ 0.552, -0.828, -0.7 ], ... ],
    	[ [ -0.164, -1.868, 0.144 ], [ 1.284, -0.908, 0.648 ], [ -0.688, -1.368, 0.408 ], [ 0.704, -1.084, -0.852 ], [ 0.856, 0.728, -1.072 ], [ -1.28, -1.088, -0.896 ], [ -0.04, 0.412, 1.484 ], [ -0.008, -1.72, 1.16 ], ... ],
    	[ [ 1.748, -1.676, 0.624 ], [ -1.428, 0.704, -1.64 ], [ -1.044, 0.136, 1.928 ], [ 1.528, 0.052, 1.796 ], [ -0.544, 0.728, -1.616 ], [ -1.592, 1.132, 1.384 ], [ -0.396, 1.364, -0.52 ], [ -0.912, -1.552, -0.752 ], ... ],
    	[ [ 1.496, 1.32, 1.052 ], [ 0.476, -1.672, -1.844 ], [ -1.868, -1.312, 0.148 ], [ -1.856, 0.08, 0.896 ], [ 0.144, -1.92, 1.396 ], [ -0.484, -1.072, 0.744 ], [ 0.1, -1.328, 1.288 ], [ 1.012, 0.008, 0.3 ], ... ],
    	[ [ -1.44, -0.404, 1.748 ], [ -0.152, -0.364, 1.548 ], [ -1.276, 2.0, 1.088 ], [ 1.104, -0.232, -1.704 ], [ -1.288, 0.464, 0.784 ], [ 0.712, -1.076, 0.532 ], [ -1.6, -0.7, 1.8 ], [ 0.956, -1.188, -1.732 ], ... ],
    	[ [ 0.764, 0.784, 0.304 ], [ 0.984, -0.26, 1.016 ], [ 0.944, -1.448, 1.34 ], [ 1.264, -0.876, -0.076 ], [ -1.768, -0.728, -1.744 ], [ -0.136, -0.144, -0.172 ], [ 1.984, 1.328, -1.368 ], [ -0.764, 1.22, -0.672 ], ... ],
    	[ [ 0.668, -0.104, 1.244 ], [ 1.896, 0.276, -1.568 ], [ -0.036, -1.012, -0.176 ], [ -0.88, 0.252, 1.08 ], [ 0.38, -1.12, 0.852 ], [ 0.316, -0.484, 0.804 ], [ 0.844, 1.696, 0.24 ], [ -0.472, 0.2, 0.748 ], ... ],
    	[ [ 2.0, 1.776, -1.536 ], [ 1.836, 1.124, 0.088 ], [ 1.208, 1.584, -0.836 ], [ 0.652, 1.564, -0.108 ], [ -0.892, -0.748, 1.32 ], [ -1.86, -0.092, 1.472 ], [ 1.292, 0.22, -0.208 ], [ -1.456, 1.34, -0.896 ], ... ],
    	...
    ]

Gradient Descent

First, we train using the basic gradient descent method with weak line search conditions.

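The line search log below labels trial steps with "WOLFE (weak)", "WOLF (strong)", and "END"; these refer to the standard Armijo sufficient-decrease and Wolfe curvature tests. The sketch below shows those textbook tests only, not the ArmijoWolfeSearch implementation; the toy 1-D objective f(x) = x^2, the constants c1 and c2, and the class name are assumptions.

    // Textbook Armijo / Wolfe acceptance tests for a trial step size t along a
    // descent direction. f0, g0: value and directional derivative at t = 0;
    // ft, gt: the same quantities at the trial step t.
    public class WolfeSketch {
      static boolean armijo(double f0, double g0, double t, double ft, double c1) {
        return ft <= f0 + c1 * t * g0;            // sufficient decrease
      }
      static boolean weakWolfe(double g0, double gt, double c2) {
        return gt >= c2 * g0;                     // slope has flattened enough
      }
      static boolean strongWolfe(double g0, double gt, double c2) {
        return Math.abs(gt) <= c2 * Math.abs(g0); // and the step has not overshot
      }
      public static void main(String[] args) {
        // Toy objective f(x) = x^2, searched from x = -1 along direction +1.
        double f0 = 1.0, g0 = -2.0, c1 = 1e-4, c2 = 0.9;
        for (double t : new double[]{0.05, 1.0, 1.95}) {
          double x = -1.0 + t, ft = x * x, gt = 2.0 * x;
          System.out.printf("t=%.2f armijo=%b weak=%b strong=%b%n",
              t, armijo(f0, g0, t, ft, c1), weakWolfe(g0, gt, c2), strongWolfe(g0, gt, c2));
        }
      }
    }

In the log, the step size grows through the "WOLFE (weak)" samples until the "END" line records the step that is finally accepted.
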
TrainingTester.java:480 executed in 30.30 seconds (3.191 gc):

    IterativeTrainer iterativeTrainer = new IterativeTrainer(trainable.addRef());
    try {
      iterativeTrainer.setLineSearchFactory(label -> new ArmijoWolfeSearch());
      iterativeTrainer.setOrientation(new GradientDescent());
      iterativeTrainer.setMonitor(TrainingTester.getMonitor(history));
      iterativeTrainer.setTimeout(30, TimeUnit.SECONDS);
      iterativeTrainer.setMaxIterations(250);
      iterativeTrainer.setTerminateThreshold(0);
      return iterativeTrainer.run();
    } finally {
      iterativeTrainer.freeRef();
    }
Logging
Reset training subject: 3369277036708
Reset training subject: 3370796046742
Constructing line search parameters: GD
th(0)=2.666336795758519;dx=-4.937660732886166E-7
New Minimum: 2.666336795758519 > 2.6663357319718672
WOLFE (weak): th(2.154434690031884)=2.6663357319718672; dx=-4.93765974789842E-7 evalInputDelta=1.0637866516560734E-6
New Minimum: 2.6663357319718672 > 2.6663346681854287
WOLFE (weak): th(4.308869380063768)=2.6663346681854287; dx=-4.937658762910663E-7 evalInputDelta=2.127573090149326E-6
New Minimum: 2.6663346681854287 > 2.6663304130417957
WOLFE (weak): th(12.926608140191302)=2.6663304130417957; dx=-4.937654822959678E-7 evalInputDelta=6.382716723152271E-6
New Minimum: 2.6663304130417957 > 2.666311264937465
WOLFE (weak): th(51.70643256076521)=2.666311264937465; dx=-4.937637093180472E-7 evalInputDelta=2.5530821053720842E-5
New Minimum: 2.666311264937465 > 2.6662091428755783
WOLFE (weak): th(258.53216280382605)=2.6662091428755783; dx=-4.93754253435757E-7 evalInputDelta=1.2765288294058763E-4
New Minimum: 2.6662091428755783 > 2.6655709242980583
WOLFE (weak): th(1551.1929768229563)=2.6655709242980583; dx=-4.936951541714825E-7 evalInputDelta=7.658714604605876E-4
New Minimum: 2.6655709242980583 > 2.66097800572926
WOLFE (weak): th(10858.350837760694)=2.66097800572926; dx=-4.932696394686533E-7 evalInputDelta=0.005358790029259097
New Minimum: 2.66097800572926 > 2.6236174081968233
WOLFE (weak): th(86866.80670208555)=2.6236174081968233; dx=-4.89794602728945E-7 evalInputDelta=0.042719387561695576
New Minimum: 2.6236174081968233 > 2.2942819104587313
WOLFE (weak): th(781801.26031877)=2.2942819104587313; dx=-4.5802283825160843E-7 evalInputDelta=0.3720548852997876
New Minimum: 2.2942819104587313 > 0.2032727217517815
END: th(7818012.6031877)=0.2032727217517815; dx=-1.363337229185319E-7 evalInputDelta=2.4630640740067373
Fitness changed from 2.666336795758519 to 0.2032727217517815
Iteration 1 complete. Error: 0.2032727217517815 Total: 19.5814; Orientation: 0.7468; Line Search: 16.0529
th(0)=0.2032727217517815;dx=-3.7643096620700105E-8
New Minimum: 0.2032727217517815 > 0.06364934239201818
WOLF (strong): th(1.684339755941405E7)=0.06364934239201818; dx=2.1064092430215497E-8 evalInputDelta=0.13962337935976332
New Minimum: 0.06364934239201818 > 0.009857466474368064
END: th(8421698.779707026)=0.009857466474368064; dx=-8.289502095242375E-9 evalInputDelta=0.19341525527741343
Fitness changed from 0.2032727217517815 to 0.009857466474368064
Iteration 2 complete. Error: 0.009857466474368064 Total: 5.8792; Orientation: 0.7464; Line Search: 4.5337
th(0)=0.009857466474368064;dx=-1.8254567545126162E-9
New Minimum: 0.009857466474368064 > 0.004558092497747795
WOLF (strong): th(1.8144E7)=0.004558092497747795; dx=1.2413105930685819E-9 evalInputDelta=0.0052993739766202695
New Minimum: 0.004558092497747795 > 2.523511417438223E-4
END: th(9072000.0)=2.523511417438223E-4; dx=-2.9207308072201543E-10 evalInputDelta=0.009605115332624242
Fitness changed from 0.009857466474368064 to 2.523511417438223E-4
Iteration 3 complete. Error: 2.523511417438223E-4 Total: 4.8286; Orientation: 0.7383; Line Search: 3.4896
Final threshold in iteration 3: 2.523511417438223E-4 (> 0.0) after 30.290s (< 30.000s)

Returns

    2.523511417438223E-4

This training run resulted in the following configuration:

TrainingTester.java:610 executed in 0.00 seconds (0.000 gc):

    RefList<double[]> state = network.state();
    assert state != null;
    String description = state.stream().map(RefArrays::toString).reduce((a, b) -> a + "\n" + b)
        .orElse("");
    state.freeRef();
    return description;

Returns

    

And regressed input:

TrainingTester.java:622 executed in 0.01 seconds (0.000 gc):

    return RefArrays.stream(RefUtil.addRef(data)).flatMap(x -> {
      return RefArrays.stream(x);
    }).limit(1).map(x -> {
      String temp_18_0015 = x.prettyPrint();
      x.freeRef();
      return temp_18_0015;
    }).reduce((a, b) -> a + "\n" + b).orElse("");

Returns

    [
    	[ [ -1.9578498371143855, 1.5754941188727485, -1.292878750285614 ], [ 1.0673773770741521, -1.0985764546542691, 0.5412208405962572 ], [ 1.9382860254610523, 0.6720842014580114, 0.08989864762526312 ], [ -0.6773619876502924, 1.6730942022196484, 1.3665421528208181 ], [ -0.6138289548446784, 0.6073855121592983, -0.5088253276753217 ], [ 0.5136796727054969, 1.2798588529459647, 1.5994842222947365 ], [ 1.9648543450301752, -1.1315167626353213, -0.980067931287719 ], [ 1.4970489146945027, -0.18552401697403487, 1.816223587019181 ], ... ],
    	[ [ -1.9265584229911108, -1.017632623687485, -1.5585520493988303 ], [ -0.19575024623251447, -1.5750416603557893, -0.9729240378107599 ], [ -0.5805593037375436, -1.2530878286273681, -1.8808462099450287 ], [ 1.9694316804308771, -0.8534461891083038, -1.0702081975953215 ], [ -0.5369257993036257, 1.0686534017735672, 0.6335321520591811 ], [ 0.1483810042435087, 0.3247312296392982, -1.729323073717427 ], [ -1.955701435721637, -1.9152362300201167, -1.195889631793684 ], [ 0.4370506761873684, -0.19358909765520493, 0.1973701227354387 ], ... ],
    	[ [ 1.3585032388879525, -1.5076253693487718, -0.9447945488276025 ], [ 0.0030904708666668404, -1.186978341167485, 0.6964199181763742 ], [ 1.544667910450994, -1.537180165170526, 0.49462174217941507 ], [ 0.4480371524399997, -1.3810181358467832, 1.3170245094390638 ], [ -1.2462570081061988, -0.8536425202654971, -0.104 ], [ 1.5022877869539182, -1.2732841597845614, -1.0399511894891225 ], [ -1.6187611277405844, -1.0841475206463158, -0.5681231153908769 ], [ 0.879922172134269, 1.6927701435721636, -0.12291678347204653 ], ... ],
    	[ [ -1.212176538001169, -1.1434371732767246, 0.13765065535064344 ], [ 0.06751038904304105, -0.4467628892334503, 1.1206307580109942 ], [ -0.17403165959415215, -1.618044406778713, -0.7159303072194152 ], [ 0.8249565781513449, -0.8622941605461986, -0.7375954712474854 ], [ -0.2803955129209354, -1.017188300255672, -1.3888624801153213 ], [ 0.25201627017029254, -1.5715800818236259, -0.39873848397801154 ], [ 1.3630497954409357, 1.0031990775801172, 0.3736634025352043 ], [ 1.2159204106414032, -1.3573683612425729, -0.2956724183667835 ], ... ],
    	[ [ 1.3427158402154387, -1.529755739078362, -1.2706525210271342 ], [ -0.16592942647298234, 1.9486045912626895, -1.7038199390130992 ], [ -0.13259997916327498, 0.08171045155321609, -0.9080696927805848 ], [ -0.3316561481964909, 1.3737429918938011, 0.6367312296392982 ], [ -0.1907611277405845, -0.924466967194386, -0.2991357583918128 ], [ 0.02111399537567274, -1.3751276233066665, 0.12225063451391842 ], [ 0.9273076842935674, -0.7903882585822222, -1.8758832582014033 ], [ 0.29975838131766075, 1.8625421528208181, -0.30760909917847923 ], ... ],
    	[ [ 1.4648299397747362, 1.4347466190631577, -0.20118368815625745 ], [ -1.5998443442685382, 0.42848323736467847, -1.0664597128556728 ], [ -1.2576244886023389, 0.17392305288070192, -0.39568055345192976 ], [ -1.5448299397747363, -1.97391315630269, -0.8886986892987132 ], [ -1.6520696927805847, 0.9089013940481869, 1.0172922948697076 ], [ -1.3541140995592977, -1.4826461474348538, -1.6397502462325144 ], [ -0.15077915940374287, -0.12778278657309955, 1.6734525627005843 ], [ 1.702067050541286, 1.6405267633969585, -0.6175077468037423 ], ... ],
    	[ [ 1.2625375407214037, 0.8380624384418713, 0.8459456966432749 ], [ 0.17421809417333337, 1.023344836733567, -1.8601846730863152 ], [ -0.41531405788584774, 1.3323955129209355, 1.0669493238126315 ], [ -1.2082932797997656, -0.23790678271040938, 0.6151520285621053 ], [ 0.0016018448397658452, 0.5888398363527484, -0.7944724600402334 ], [ 0.9840162701702925, -1.2743656148196492, -1.7858887510472512 ], [ 1.3447765171644441, -0.6714796101953219, 0.9552036896795317 ], [ 1.411523136227602, -0.4683728691583624, -0.760544795060117 ], ... ],
    	[ [ 1.108176538001169, 0.595344836733567, -0.6588099382514622 ], [ -0.6487248560470178, 1.382410902344795, -0.8619602053207016 ], [ 0.8745212705511111, -1.0893068035471345, -1.2211755530711113 ], [ 1.3619747139981282, 0.36651950905824526, 1.3781239961373097 ], [ 0.4821158610521636, 1.9449403079810523, -0.9631729108318124 ], [ 1.9269638324900582, -0.18961987650292417, -1.343547541483041 ], [ -0.4954017823295908, -1.8133131771394149, -1.2826542825200002 ], [ 0.1104597128556726, -1.9241620293237425, 1.496534898482105 ], ... ],
    	...
    ]

To produce the following output:

TrainingTester.java:633 executed in 0.12 seconds (0.000 gc):

    Result[] array = ConstantResult.batchResultArray(pop(RefUtil.addRef(data)));
    @Nullable
    Result eval = layer.eval(array);
    assert eval != null;
    TensorList tensorList = Result.getData(eval);
    String temp_18_0016 = tensorList.stream().limit(1).map(x -> {
      String temp_18_0017 = x.prettyPrint();
      x.freeRef();
      return temp_18_0017;
    }).reduce((a, b) -> a + "\n" + b).orElse("");
    tensorList.freeRef();
    return temp_18_0016;

Returns

    [
    	[ [ 0.4390416603557893, -1.774667029704561, 0.2689502045590645 ], [ 0.531189181002105, -0.5983656148196491, 0.21658194750011664 ], [ 0.8717972952505263, -1.437421783852865, 0.2929565781513449 ], [ -1.7634307996844443, -0.2367457383167249, 0.9192977877155551 ], [ -1.266488730210526, 0.4050262709319296, 0.5259375615581287 ], [ 1.020911290626199, -1.4691040987976607, -0.2754081559218713 ], [ -1.8117583813176608, 1.1696262500952046, 0.7456488938577777 ], [ 0.44241266383766087, 0.34382170050596506, -0.07809497878245643 ], ... ],
    	[ [ -0.16048961095695896, -0.9369647132364911, 0.38021083983462 ], [ -1.205509508296608, -0.782390020075088, -1.5450100007616372 ], [ 1.7957031972145028, 0.9698063110821055, -1.4913303280561403 ], [ -1.0365819475001168, -1.3743493446493564, -1.1748860046243272 ], [ 0.35207606637286515, -1.6145810667536837, 0.2586687911974268 ], [ 0.9204199181763743, -0.08992305288070193, 0.41808508220444435 ], [ -1.1327312296392982, 1.9567602469941516, -0.36932944730970735 ], [ -0.4491366391382457, 1.552168402916023, 1.283610860671345 ], ... ],
    	[ [ 1.6832588737826897, 1.1513303280561402, -1.023844344268538 ], [ 1.4361556557314619, 0.3882189749197663, 0.9754371732767246 ], [ 0.19337825782058493, 0.16218731532561415, -1.9165511686523973 ], [ 0.695711332299649, 0.24181268467438588, 1.02258920183883 ], [ 0.0726923157064327, 1.214543914313684, 1.6863783620042099 ], [ 0.9286679104509938, -1.6842723975300584, -0.6666769262825731 ], [ 1.9461530134921632, -0.71157832033076, -0.5770262709319296 ], [ -0.5265746931614033, -1.4865502879059644, 1.2056814341983626 ], ... ],
    	[ [ -0.18919006174853792, -0.6311502670692395, -0.7197177058919294 ], [ 0.9522253485120467, 0.9853828699199996, -0.8951194882215203 ], [ 0.06102803242479546, 1.7511874195092392, -1.086590963331696 ], [ -1.6613683612425727, 0.9357049587073686, 0.9748389556063155 ], [ -1.6540850822044444, 0.1573846314128654, 1.1759593245742685 ], [ -0.5567312296392982, -0.5651755530711112, -1.790513135465965 ], [ -1.0817493654860817, -0.809167417985965, 0.933788279418947 ], [ -1.109399140090292, -0.35317555307111115, 0.7375321520591811 ], ... ],
    	[ [ 1.6046986892987132, 0.26406969278058473, -0.7497493654860816 ], [ -1.5649257993036259, 0.9763711076654966, 0.904522151297544 ], [ -1.0038733616233915, 1.2739764754909941, -1.9567149594690056 ], [ 1.4172579930362568, 1.6931412512376602, 0.015625369348771734 ], [ 1.4298823774549705, 0.30066967194385974, 1.6299520702355552 ], [ -1.7161864345791813, 0.619688688537076, 1.1637357375550876 ], [ 1.5108308205211693, -0.3679854913225733, -1.4862244677656138 ], [ -1.1749185449649122, -0.7458905125401168, -0.22217918024046787 ], ... ],
    	[ [ -0.8178271933518125, 1.7487050628909937, -0.006568319569122849 ], [ 0.9363420903106432, 1.1118344476905258, 1.7434941188727484 ], [ -1.0483972744138013, -1.2829638324900583, -1.8498724808769584 ], [ -0.8253213122245611, 1.6628018031663157, 1.4699602053207015 ], [ 1.715701435721637, -0.8465973369239763, -1.4933602261574266 ], [ -0.355189181002105, 0.005978236983859936, 0.09144069626245632 ], [ -0.5884506970240935, -0.851844344268538, 1.7528235661824558 ], [ 0.48958557466947333, 0.7468714959469006, -1.671033525270643 ], ... ],
    	[ [ -0.2575339135520469, 1.3279738332516955, -0.384599979163275 ], [ 0.8180624384418713, 1.9225828282465496, 1.7389864762526313 ], [ -0.4892516194439763, -1.9407149594690056, 0.8609159027256136 ], [ -0.930699570045146, 1.0967376032315788, -1.8188145503508768 ], [ 1.8794470698547368, -0.946255246613333, -1.3893457174799997 ], [ -0.5421547749850291, -0.07308782862736811, 1.4474842222947364 ], [ 0.528045287525146, 0.39495745889777784, -1.692721333061286 ], [ -0.42280993825146207, 1.4815710659920467, 1.4032118247646779 ], ... ],
    	[ [ -0.21693393438877195, -1.7328462099450288, 1.3816181150100584 ], [ -1.4499747139981283, 0.5821855538327483, -0.540288667700351 ], [ 1.676776517164444, -0.3958687495239768, -0.14642079892280713 ], [ -0.10382983559111131, 0.6398054303356725, 1.10045883210924 ], [ -0.2686289965181284, 1.2383429710570761, 0.14046520570152016 ], [ 0.5711665372395319, -1.3627547541483043, 1.9193122963929818 ], [ -1.660946681573333, 1.3472914141232748, -0.8254461891083038 ], [ -1.1890262709319297, -0.9253619876502924, -0.3484832373646785 ], ... ],
    	...
    ]

Conjugate Gradient Descent

Next, we use a conjugate gradient descent method, which converges fastest on purely quadratic objectives.

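The QuadraticSearch used here brackets a minimum and repeatedly fits a parabola to sampled values and derivatives, which is why a purely quadratic objective can converge in a single iteration, as the log below shows. The following is a minimal sketch of one such interpolation step; the real search's bracketing and tolerance logic is omitted, and the example numbers and class name are assumptions.

    // One quadratic-interpolation step: fit a parabola to f(a), f'(a) and f(b),
    // then jump to its vertex. For a quadratic objective this lands exactly on
    // the minimizer in a single step.
    public class QuadraticStepSketch {
      static double quadraticMin(double a, double fa, double ga, double b, double fb) {
        double h = b - a;
        double c = (fb - fa - ga * h) / (h * h); // curvature of the interpolant
        return a - ga / (2.0 * c);               // vertex of the parabola
      }
      public static void main(String[] args) {
        // f(t) = (t - 3)^2 sampled at a = 0 and b = 1: the step returns 3.0 exactly.
        System.out.println(quadraticMin(0.0, 9.0, -6.0, 1.0, 4.0));
      }
    }
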
TrainingTester.java:452 executed in 38.85 seconds (1.997 gc):

    IterativeTrainer iterativeTrainer = new IterativeTrainer(trainable.addRef());
    try {
      iterativeTrainer.setLineSearchFactory(label -> new QuadraticSearch());
      iterativeTrainer.setOrientation(new GradientDescent());
      iterativeTrainer.setMonitor(TrainingTester.getMonitor(history));
      iterativeTrainer.setTimeout(30, TimeUnit.SECONDS);
      iterativeTrainer.setMaxIterations(250);
      iterativeTrainer.setTerminateThreshold(0);
      return iterativeTrainer.run();
    } finally {
      iterativeTrainer.freeRef();
    }
Logging
Reset training subject: 3399973059035
Reset training subject: 3400587662968
Constructing line search parameters: GD
F(0.0) = LineSearchPoint{point=PointSample{avg=2.666336795758519}, derivative=-4.937660732886166E-7}
New Minimum: 2.666336795758519 > 2.6663367957585185
F(1.0E-10) = LineSearchPoint{point=PointSample{avg=2.6663367957585185}, derivative=-4.937660732886166E-7}, evalInputDelta = -4.440892098500626E-16
New Minimum: 2.6663367957585185 > 2.666336795758518
F(7.000000000000001E-10) = LineSearchPoint{point=PointSample{avg=2.666336795758518}, derivative=-4.937660732886166E-7}, evalInputDelta = -8.881784197001252E-16
New Minimum: 2.666336795758518 > 2.6663367957585162
F(4.900000000000001E-9) = LineSearchPoint{point=PointSample{avg=2.6663367957585162}, derivative=-4.937660732886165E-7}, evalInputDelta = -2.6645352591003757E-15
New Minimum: 2.6663367957585162 > 2.6663367957585016
F(3.430000000000001E-8) = LineSearchPoint{point=PointSample{avg=2.6663367957585016}, derivative=-4.937660732886154E-7}, evalInputDelta = -1.7319479184152442E-14
New Minimum: 2.6663367957585016 > 2.6663367957584
F(2.4010000000000004E-7) = LineSearchPoint{point=PointSample{avg=2.6663367957584}, derivative=-4.937660732886065E-7}, evalInputDelta = -1.1901590823981678E-13
New Minimum: 2.6663367957584 > 2.666336795757689
F(1.6807000000000003E-6) = LineSearchPoint{point=PointSample{avg=2.666336795757689}, derivative=-4.937660732885423E-7}, evalInputDelta = -8.30002733209767E-13
New Minimum: 2.666336795757689 > 2.6663367957527093
F(1.1764900000000001E-5) = LineSearchPoint{point=PointSample{avg=2.6663367957527093}, derivative=-4.937660732880793E-7}, evalInputDelta = -5.809575043258519E-12
New Minimum: 2.6663367957527093 > 2.6663367957178545
F(8.235430000000001E-5) = LineSearchPoint{point=PointSample{avg=2.6663367957178545}, derivative=-4.937660732848482E-7}, evalInputDelta = -4.0664360767550534E-11
New Minimum: 2.6663367957178545 > 2.6663367954738724
F(5.764801000000001E-4) = LineSearchPoint{point=PointSample{avg=2.6663367954738724}, derivative=-4.937660732622602E-7}, evalInputDelta = -2.846465285699651E-10
New Minimum: 2.6663367954738724 > 2.666336793765994
F(0.004035360700000001) = LineSearchPoint{point=PointSample{avg=2.666336793765994}, derivative=-4.937660731041224E-7}, evalInputDelta = -1.992524811811336E-9
New Minimum: 2.666336793765994 > 2.666336781810849
F(0.028247524900000005) = LineSearchPoint{point=PointSample{avg=2.666336781810849}, derivative=-4.937660719971618E-7}, evalInputDelta = -1.3947669685876463E-8
New Minimum: 2.666336781810849 > 2.6663366981248333
F(0.19773267430000002) = LineSearchPoint{point=PointSample{avg=2.6663366981248333}, derivative=-4.937660642484538E-7}, evalInputDelta = -9.763368558068919E-8
New Minimum: 2.6663366981248333 > 2.6663361123227594
F(1.3841287201) = LineSearchPoint{point=PointSample{avg=2.6663361123227594}, derivative=-4.937660100075202E-7}, evalInputDelta = -6.834357595408846E-7
New Minimum: 2.6663361123227594 > 2.666332011710043
F(9.688901040700001) = LineSearchPoint{point=PointSample{avg=2.666332011710043}, derivative=-4.937656303209744E-7}, evalInputDelta = -4.784048476036418E-6
New Minimum: 2.666332011710043 > 2.66630330750932
F(67.8223072849) = LineSearchPoint{point=PointSample{avg=2.66630330750932}, derivative=-4.93762972515061E-7}, evalInputDelta = -3.3488249199020714E-5
New Minimum: 2.66630330750932 > 2.666102382430462
F(474.7561509943) = LineSearchPoint{point=PointSample{avg=2.666102382430462}, derivative=-4.937443678737537E-7}, evalInputDelta = -2.3441332805695936E-4
New Minimum: 2.666102382430462 > 2.6646961188624863
F(3323.2930569601003) = LineSearchPoint{point=PointSample{avg=2.6646961188624863}, derivative=-4.936141353846232E-7}, evalInputDelta = -0.0016406768960326445
New Minimum: 2.6646961188624863 > 2.6548626611041057
F(23263.0513987207) = LineSearchPoint{point=PointSample{avg=2.6548626611041057}, derivative=-4.927025079606586E-7}, evalInputDelta = -0.01147413465441316
New Minimum: 2.6548626611041057 > 2.5865374304503206
F(162841.3597910449) = LineSearchPoint{point=PointSample{avg=2.5865374304503206}, derivative=-4.863211159929659E-7}, evalInputDelta = -0.07979936530819831
New Minimum: 2.5865374304503206 > 2.1332005249629358
F(1139889.5185373144) = LineSearchPoint{point=PointSample{avg=2.1332005249629358}, derivative=-4.416513722190762E-7}, evalInputDelta = -0.5331362707955831
New Minimum: 2.1332005249629358 > 0.1818879319177647
F(7979226.6297612) = LineSearchPoint{point=PointSample{avg=0.1818879319177647}, derivative=-1.2896316580184253E-7}, evalInputDelta = -2.484448863840754
F(5.58545864083284E7) = LineSearchPoint{point=PointSample{avg=46.4029413035612}, derivative=2.0598542791187665E-6}, evalInputDelta = 43.73660450780268
F(4296506.646794492) = LineSearchPoint{point=PointSample{avg=0.9668539959703732}, derivative=-2.973337384880465E-7}, evalInputDelta = -1.6994827997881456
F(3.007554652756145E7) = LineSearchPoint{point=PointSample{avg=8.493386671677658}, derivative=8.812602703153736E-7}, evalInputDelta = 5.82704987591914
F(2313503.579043188) = LineSearchPoint{point=PointSample{avg=1.6463583090416112}, derivative=-3.879948160883112E-7}, evalInputDelta = -1.0199784867169077
F(1.6194525053302318E7) = LineSearchPoint{point=PointSample{avg=0.6652332126891098}, derivative=2.466327271135332E-7}, evalInputDelta = -2.0011035830694093
0.6652332126891098 <= 2.666336795758519
New Minimum: 0.1818879319177647 > 2.5235287461476087E-30
F(1.0799999999999989E7) = LineSearchPoint{point=PointSample{avg=2.5235287461476087E-30}, derivative=-4.778769889037711E-22}, evalInputDelta = -2.666336795758519
Left bracket at 1.0799999999999989E7
Converged to left
Fitness changed from 2.666336795758519 to 2.5235287461476087E-30
Iteration 1 complete. Error: 2.5235287461476087E-30 Total: 38.8527; Orientation: 0.8103; Line Search: 36.0892
Final threshold in iteration 1: 2.5235287461476087E-30 (> 0.0) after 38.853s (< 30.000s)

Returns

    2.5235287461476087E-30

Training Converged

Limited-Memory BFGS

Next, we apply the same optimization using L-BFGS, which is nearly ideal for smooth, approximately quadratic objectives.

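The LBFGS orientation builds a search direction from a short history of recent steps s_k and gradient differences y_k; when that direction fails the trainer's checks it is rejected and history entries are popped, as the "Orientation rejected" lines below show. The sketch below is the textbook two-loop recursion behind this kind of direction, not the library's LBFGS class; the toy vectors in main are assumptions, and it requires Java 9+ for List.of.

    // Textbook L-BFGS two-loop recursion: turn the current gradient g into an
    // approximate Newton direction using stored steps s_k and gradient
    // differences y_k, without ever forming a Hessian. Assumes the history is
    // non-empty and the curvature products dot(y_k, s_k) are positive.
    import java.util.List;

    public class LbfgsTwoLoopSketch {
      static double[] direction(double[] g, List<double[]> s, List<double[]> y) {
        int m = s.size();
        double[] q = g.clone();
        double[] alpha = new double[m];
        for (int i = m - 1; i >= 0; i--) {          // first loop: newest to oldest
          double rho = 1.0 / dot(y.get(i), s.get(i));
          alpha[i] = rho * dot(s.get(i), q);
          axpy(-alpha[i], y.get(i), q);
        }
        double gamma = dot(s.get(m - 1), y.get(m - 1)) / dot(y.get(m - 1), y.get(m - 1));
        for (int j = 0; j < q.length; j++) q[j] *= gamma; // initial Hessian scaling
        for (int i = 0; i < m; i++) {               // second loop: oldest to newest
          double rho = 1.0 / dot(y.get(i), s.get(i));
          double beta = rho * dot(y.get(i), q);
          axpy(alpha[i] - beta, s.get(i), q);
        }
        for (int j = 0; j < q.length; j++) q[j] = -q[j]; // descent direction
        return q;
      }
      static double dot(double[] a, double[] b) {
        double r = 0; for (int i = 0; i < a.length; i++) r += a[i] * b[i]; return r;
      }
      static void axpy(double a, double[] x, double[] acc) {
        for (int i = 0; i < x.length; i++) acc[i] += a * x[i];
      }
      public static void main(String[] args) {
        double[] g = {1.0, -2.0};
        List<double[]> s = List.of(new double[]{0.1, 0.0});
        List<double[]> y = List.of(new double[]{0.2, 0.0});
        System.out.println(java.util.Arrays.toString(direction(g, s, y))); // [-0.5, 1.0]
      }
    }
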
TrainingTester.java:509 executed in 344.75 seconds (7.662 gc):

    IterativeTrainer iterativeTrainer = new IterativeTrainer(trainable.addRef());
    try {
      iterativeTrainer.setLineSearchFactory(label -> new ArmijoWolfeSearch());
      iterativeTrainer.setOrientation(new LBFGS());
      iterativeTrainer.setMonitor(TrainingTester.getMonitor(history));
      iterativeTrainer.setTimeout(30, TimeUnit.SECONDS);
      iterativeTrainer.setIterationsPerSample(100);
      iterativeTrainer.setMaxIterations(250);
      iterativeTrainer.setTerminateThreshold(0);
      return iterativeTrainer.run();
    } finally {
      iterativeTrainer.freeRef();
    }
Logging
Reset training subject: 3439054832861
Reset training subject: 3439645350309
Adding measurement 2b0a7420 to history. Total: 0
LBFGS Accumulation History: 1 points
Constructing line search parameters: GD
Non-optimal measurement 2.666336795758519 < 2.666336795758519. Total: 1
th(0)=2.666336795758519;dx=-4.937660732886166E-7
Adding measurement c5d4654 to history. Total: 1
New Minimum: 2.666336795758519 > 2.6663357319718672
WOLFE (weak): th(2.154434690031884)=2.6663357319718672; dx=-4.93765974789842E-7 evalInputDelta=1.0637866516560734E-6
Adding measurement 25051c19 to history. Total: 2
New Minimum: 2.6663357319718672 > 2.6663346681854287
WOLFE (weak): th(4.308869380063768)=2.6663346681854287; dx=-4.937658762910663E-7 evalInputDelta=2.127573090149326E-6
Adding measurement 1679e733 to history. Total: 3
New Minimum: 2.6663346681854287 > 2.6663304130417957
WOLFE (weak): th(12.926608140191302)=2.6663304130417957; dx=-4.937654822959678E-7 evalInputDelta=6.382716723152271E-6
Adding measurement 4ab9012b to history. Total: 4
New Minimum: 2.6663304130417957 > 2.666311264937465
WOLFE (weak): th(51.70643256076521)=2.666311264937465; dx=-4.937637093180472E-7 evalInputDelta=2.5530821053720842E-5
Adding measurement 5caadf0 to history. Total: 5
New Minimum: 2.666311264937465 > 2.6662091428755783
WOLFE (weak): th(258.53216280382605)=2.6662091428755783; dx=-4.93754253435757E-7 evalInputDelta=1.2765288294058763E-4
Adding measurement 150f7b67 to history. Total: 6
New Minimum: 2.6662091428755783 > 2.6655709242980583
WOLFE (weak): th(1551.1929768229563)=2.6655709242980583; dx=-4.936951541714825E-7 evalInputDelta=7.658714604605876E-4
Adding measurement 65cc2467 to history. Total: 7
New Minimum: 2.6655709242980583 > 2.66097800572926
WOLFE (weak): th(10858.350837760694)=2.66097800572926; dx=-4.932696394686533E-7 evalInputDelta=0.005358790029259097
Adding measurement 60d0340 to history. Total: 8
New Minimum: 2.66097800572926 > 2.6236174081968233
WOLFE (weak): th(86866.80670208555)=2.6236174081968233; dx=-4.89794602728945E-7 evalInputDelta=0.042719387561695576
Adding measurement 3510abcd to history. Total: 9
New Minimum: 2.6236174081968233 > 2.2942819104587313
WOLFE (weak): th(781801.26031877)=2.2942819104587313; dx=-4.5802283825160843E-7 evalInputDelta=0.3720548852997876
Adding measurement 2ee59aa4 to history. Total: 10
New Minimum: 2.2942819104587313 > 0.2032727217517815
END: th(7818012.6031877)=0.2032727217517815; dx=-1.363337229185319E-7 evalInputDelta=2.4630640740067373
Fitness changed from 2.666336795758519 to 0.2032727217517815
Iteration 1 complete. Error: 0.2032727217517815 Total: 24.0558; Orientation: 0.9173; Line Search: 21.3612
Non-optimal measurement 0.2032727217517815 < 0.2032727217517815. Total: 11
Rejected: LBFGS Orientation magnitude: 2.095e+03, gradient 1.940e-04, dot -1.000; [58d079b3-2630-4611-b6e1-4e2ddd0688d8 = 1.000/1.000e+00, 84e3abd4-78ce-4b94-b3b4-780ac1e3c465 = 1.000/1.000e+00, 46417d31-d452-47ee-8063-5dfe27478cd9 = 1.000/1.000e+00, eca92560-f307-4e1b-b07a-6bc0ea5c6fe1 = 1.000/1.000e+00, 64fa4178-4e4c-48c0-bbe9-a8c3571240ce = 1.000/1.000e+00]
Orientation rejected. Popping history element from 0.2032727217517815, 2.2942819104587313, 2.6236174081968233, 2.66097800572926, 2.6655709242980583, 2.6662091428755783, 2.666311264937465, 2.6663304130417957, 2.6663346681854287, 2.6663357319718672, 2.666336795758519
Rejected: LBFGS Orientation magnitude: 2.095e+03, gradient 1.940e-04, dot -1.000; [46417d31-d452-47ee-8063-5dfe27478cd9 = 1.000/1.000e+00, 58d079b3-2630-4611-b6e1-4e2ddd0688d8 = 1.000/1.000e+00, eca92560-f307-4e1b-b07a-6bc0ea5c6fe1 = 1.000/1.000e+00, 84e3abd4-78ce-4b94-b3b4-780ac1e3c465 = 1.000/1.000e+00, 64fa4178-4e4c-48c0-bbe9-a8c3571240ce = 1.000/1.000e+00]
Orientation rejected. Popping history element from 0.2032727217517815, 2.2942819104587313, 2.6236174081968233, 2.66097800572926, 2.6655709242980583, 2.6662091428755783, 2.666311264937465, 2.6663304130417957, 2.6663346681854287, 2.6663357319718672
Rejected: LBFGS Orientation magnitude: 2.095e+03, gradient 1.940e-04, dot -1.000; [58d079b3-2630-4611-b6e1-4e2ddd0688d8 = 1.000/1.000e+00, 84e3abd4-78ce-4b94-b3b4-780ac1e3c465 = 1.000/1.000e+00, 46417d31-d452-47ee-8063-5dfe27478cd9 = 1.000/1.000e+00, eca92560-f307-4e1b-b07a-6bc0ea5c6fe1 = 1.000/1.000e+00, 64fa4178-4e4c-48c0-bbe9-a8c3571240ce = 1.000/1.000e+00]
Orientation rejected. Popping history element from 0.2032727217517815, 2.2942819104587313, 2.6236174081968233, 2.66097800572926, 2.6655709242980583, 2.6662091428755783, 2.666311264937465, 2.6663304130417957, 2.6663346681854287
Rejected: LBFGS Orientation magnitude: 2.095e+03, gradient 1.940e-04, dot -1.000; [64fa4178-4e4c-48c0-bbe9-a8c3571240ce = 1.000/1.000e+00, 46417d31-d452-47ee-8063-5dfe27478cd9 = 1.000/1.000e+00, 58d079b3-2630-4611-b6e1-4e2ddd0688d8 = 1.000/1.000e+00, eca92560-f307-4e1b-b07a-6bc0ea5c6fe1 = 1.000/1.000e+00, 84e3abd4-78ce-4b94-b3b4-780ac1e3c465 = 1.000/1.000e+00]
Orientation rejected. Popping history element from 0.2032727217517815, 2.2942819104587313, 2.6236174081968233, 2.66097800572926, 2.6655709242980583, 2.6662091428755783, 2.666311264937465, 2.6663304130417957
Rejected: LBFGS Orientation magnitude: 2.095e+03, gradient 1.940e-04, dot -1.000; [58d079b3-2630-4611-b6e1-4e2ddd0688d8 = 1.000/1.000e+00, 64fa4178-4e4c-48c0-bbe9-a8c3571240ce = 1.000/1.000e+00, 46417d31-d452-47ee-8063-5dfe27478cd9 = 1.000/1.000e+00, eca92560-f307-4e1b-b07a-6bc0ea5c6fe1 = 1.000/1.000e+00, 84e3abd4-78ce-4b94-b3b4-780ac1e3c465 = 1.000/1.000e+00]
Orientation rejected. Popping history element from 0.2032727217517815, 2.2942819104587313, 2.6236174081968233, 2.66097800572926, 2.6655709242980583, 2.6662091428755783, 2.666311264937465
Rejected: LBFGS Orientation magnitude: 2.095e+03, gradient 1.940e-04, dot -1.000; [64fa4178-4e4c-48c0-bbe9-a8c3571240ce = 1.000/1.000e+00, 84e3abd4-78ce-4b94-b3b4-780ac1e3c465 = 1.000/1.000e+00, 58d079b3-2630-4611-b6e1-4e2ddd0688d8 = 1.000/1.000e+00, 46417d31-d452-47ee-8063-5dfe27478cd9 = 1.000/1.000e+00, eca92560-f307-4e1b-b07a-6bc0ea5c6fe1 = 1.000/1.000e+00]
Orientation rejected. Popping history element from 0.2032727217517815, 2.2942819104587313, 2.6236174081968233, 2.66097800572926, 2.6655709242980583, 2.6662091428755783
Rejected: LBFGS Orientation magnitude: 2.095e+03, gradient 1.940e-04, dot -1.000; [64fa4178-4e4c-48c0-bbe9-a8c3571240ce = 1.000/1.000e+00, 84e3abd4-78ce-4b94-b3b4-780ac1e3c465 = 1.000/1.000e+00, 46417d31-d452-47ee-8063-5dfe27478cd9 = 1.000/1.000e+00, eca92560-f307-4e1b-b07a-6bc0ea5c6fe1 = 1.000/1.000e+00, 58d079b3-2630-4611-b6e1-4e2ddd0688d8 = 1.000/1.000e+00]
Orientation rejected. Popping history element from 0.2032727217517815, 2.2942819104587313, 2.6236174081968233, 2.66097800572926, 2.6655709242980583
Rejected: LBFGS Orientation magnitude: 2.095e+03, gradient 1.940e-04, dot -1.000; [eca92560-f307-4e1b-b07a-6bc0ea5c6fe1 = 1.000/1.000e+00, 84e3abd4-78ce-4b94-b3b4-780ac1e3c465 = 1.000/1.000e+00, 58d079b3-2630-4611-b6e1-4e2ddd0688d8 = 1.000/1.000e+00, 64fa4178-4e4c-48c0-bbe9-a8c3571240ce = 1.000/1.000e+00, 46417d31-d452-47ee-8063-5dfe27478cd9 = 1.000/1.000e+00]
Orientation rejected. Popping history element from 0.2032727217517815, 2.2942819104587313, 2.6236174081968233, 2.66097800572926
LBFGS Accumulation History: 3 points
Removed measurement 2ee59aa4 to history. Total: 10
Removed measurement 3510abcd to history. Total: 9
Removed measurement 60d0340 to history. Total: 8
Removed measurement 65cc2467 to history. Total: 7
Removed measurement 150f7b67 to history. Total: 6
Removed measurement 5caadf0 to history. Total: 5
Removed measurement 4ab9012b to history. Total: 4
Removed measurement 1679e733 to history. Total: 3
Adding measurement 34e915bb to history. Total: 3
th(0)=0.2032727217517815;dx=-3.7643096620700105E-8
Adding measurement 30bf51c0 to history. Total: 4
New Minimum: 0.2032727217517815 > 0.06364934239201818
WOLF (strong): th(1.684339755941405E7)=0.06364934239201818; dx=2.1064092430215497E-8 evalInputDelta=0.13962337935976332
Adding measurement 1901e4d7 to history. Total: 5
New Minimum: 0.06364934239201818 > 0.009857466474368064
END: th(8421698.779707026)=0.009857466474368064; dx=-8.289502095242375E-9 evalInputDelta=0.19341525527741343
Fitness changed from 0.2032727217517815 to 0.009857466474368064
Iteration 2 complete. Error: 0.009857466474368064 Total: 320.6901; Orientation: 315.4054; Line Search: 4.6951
Final threshold in iteration 2: 0.009857466474368064 (> 0.0) after 344.747s (< 30.000s)

Returns

    0.009857466474368064

This training run resulted in the following configuration:

TrainingTester.java:610 executed in 0.00 seconds (0.000 gc):

    RefList<double[]> state = network.state();
    assert state != null;
    String description = state.stream().map(RefArrays::toString).reduce((a, b) -> a + "\n" + b)
        .orElse("");
    state.freeRef();
    return description;

Returns

    

And regressed input:

TrainingTester.java:622 executed in 0.01 seconds (0.000 gc):

    return RefArrays.stream(RefUtil.addRef(data)).flatMap(x -> {
      return RefArrays.stream(x);
    }).limit(1).map(x -> {
      String temp_18_0015 = x.prettyPrint();
      x.freeRef();
      return temp_18_0015;
    }).reduce((a, b) -> a + "\n" + b).orElse("");

Returns

    [
    	[ [ -1.8205614819649094, 1.5728382429546783, -1.255492189285087 ], [ 1.0641086067134502, -1.1331028415891822, 0.48463025372660706 ], [ 1.8242876591315766, 0.5885262591125714, 0.07886654765789453 ], [ -0.6845124228143277, 1.4898387638728035, 1.2118884551301137 ], [ -0.62343096777924, 0.6251594509956143, -0.5341582979707608 ], [ 0.4384979544093553, 1.2161178309122795, 1.4707763893421029 ], [ 1.8643396564385946, -1.0869797664707592, -0.8544245705482431 ], [ 1.3975557168406414, -0.10952510608771776, 1.6913974188698806 ], ... ],
    	[ [ -1.8139901436944421, -0.9212038980467818, -1.5299503087426896 ], [ -0.1524390389532155, -1.507010377223683, -0.851775236317249 ], [ -0.5204956483596479, -1.1537989289210506, -1.7592888121564303 ], [ 1.955948002692982, -0.7770386819268991, -0.9558012349707581 ], [ -0.5207862456476605, 1.051083761084795, 0.578575950369882 ], [ 0.12938127652192943, 0.3075701852456137, -1.7362692107339184 ], [ -1.7861339732602306, -1.8482264376257296, -1.1113101987105247 ], [ 0.44256672617105275, -0.28593186034503104, 0.22556326709649177 ], ... ],
    	[ [ 1.2036452430497044, -1.4426585584298233, -0.9909659301725156 ], [ 0.06131544291666782, -1.055614632296781, 0.6776244886023388 ], [ 1.464174440318712, -1.3753760323157862, 0.4453858886213441 ], [ 0.34323220274999794, -1.302363349042396, 1.1544031839941489 ], [ -1.258106300663743, -0.8832657516593573, -0.104 ], [ 1.4932986684619882, -1.280025998653509, -0.913694934307015 ], [ -1.528257048378653, -1.0639220040394735, -0.48476947119298075 ], [ 0.8795135758391813, 1.675813397326023, -0.03322989670029064 ], ... ],
    	[ [ -1.066103362507307, -0.993482332979529, 0.18831659594152148 ], [ 0.1069399315190066, -0.4612680577090646, 1.144942237568714 ], [ -0.20569787246345095, -1.4817775423669564, -0.7365644201213454 ], [ 0.7879786134459057, -0.7693385034137409, -0.7459716952967838 ], [ -0.1984719557558463, -0.8764268765979505, -1.3093905007207587 ], [ 0.29410168856432833, -1.5903755113976612, -0.3501155248625721 ], [ 1.3160612215058471, 1.0409942348757317, 0.2563962658450269 ], [ 1.11050256650877, -1.2805522577660804, -0.25195261479239683 ], ... ],
    	[ [ 1.3359740013464911, -1.3499733692397626, -1.2005782564195893 ], [ -0.13405891545613968, 1.8047786953918101, -1.63987461883187 ], [ -0.17774986977046872, -0.014309677792399556, -0.8874355798786546 ], [ -0.24585092622806848, 1.361893699336257, 0.6195701852456137 ], [ -0.10025704837865312, -0.9269185449649123, -0.2735984899488299 ], [ 0.08996247109795458, -1.3285476456666656, 0.21806646571199023 ], [ 0.9446730268347957, -0.739926616138888, -1.8752703637587718 ], [ 0.27748988323537965, 1.7078884551301137, -0.20055686986549498 ], ... ],
    	[ [ 1.301187123592102, 1.4071663691447363, -0.2493980509766091 ], [ -1.5990271516783627, 0.4730202335292406, -1.100373205347954 ], [ -1.1401530537646174, 0.22601908050438704, -0.37300345907456095 ], [ -1.381187123592102, -1.8999572268918115, -0.7873668081169571 ], [ -1.6314355798786544, 0.829633712801168, 1.0450768429356732 ], [ -1.1972131222456108, -1.4965384214678366, -1.5964390389532155 ], [ -0.20736974627339294, -0.16864241608187214, 1.5130785168786516 ], [ 1.5239190658830375, 1.4962922712309914, -0.4994234175233894 ], ... ],
    	[ [ 1.2968596295087726, 0.848890240261696, 0.856160604020468 ], [ 0.18586308858333356, 0.9359052295847936, -1.735154206789471 ], [ -0.3487128617865484, 1.2504719557558464, 1.0614332738289474 ], [ -1.0628329987485352, -0.24791739194005868, 0.6317001785131582 ], [ -0.0739884697514635, 0.5512489772046776, -0.6604528752514593 ], [ 1.0261016885643284, -1.265785092622807, -1.6488046940453192 ], [ 1.2438532322777758, -0.7317475637207616, 0.804023060497073 ], [ 1.2830196014225121, -0.42833043223976525, -0.7634049691257311 ], ... ],
    	[ [ 0.9621033625073069, 0.5079052295847937, -0.6945621140716381 ], [ -0.715530350293861, 1.2900681396549687, -0.8092512832543849 ], [ 0.8667579409444442, -1.05416752216959, -1.248347206694445 ], [ 1.2463419624883016, 0.2537469316140329, 1.3472749758581863 ], [ 0.43022413157602235, 1.8658769248815774, -0.8328306926988278 ], [ 1.858523953062864, -0.26112422814327624, -1.2781721342690047 ], [ -0.5552611395599427, -1.6942073571213427, -1.3175892657500008 ], [ 0.14437320534795386, -1.8410126832733902, 1.3733431155131555 ], ... ],
    	...
    ]

To produce the following output:

TrainingTester.java:633 executed in 0.11 seconds (0.000 gc):

    Result[] array = ConstantResult.batchResultArray(pop(RefUtil.addRef(data)));
    @Nullable
    Result eval = layer.eval(array);
    assert eval != null;
    TensorList tensorList = Result.getData(eval);
    String temp_18_0016 = tensorList.stream().limit(1).map(x -> {
      String temp_18_0017 = x.prettyPrint();
      x.freeRef();
      return temp_18_0017;
    }).reduce((a, b) -> a + "\n" + b).orElse("");
    tensorList.freeRef();
    return temp_18_0016;

Returns

    [
    	[ [ 0.37101037722368285, -1.6416689356535061, 0.31593877849415297 ], [ 0.4429323812631562, -0.5897850926228069, 0.11463717187572897 ], [ 0.849733095315789, -1.2978861490804066, 0.25597861344590567 ], [ -1.6974424980277765, -0.15666086447953054, 0.81061117322222 ], [ -1.1745545638157877, 0.34741419332456025, 0.5151097597383039 ], [ 1.0676955664137435, -1.411900617485379, -0.2513009745116954 ], [ -1.7894898832353796, 1.1571640630950288, 0.69130558661111 ], [ 0.4550791489853804, 0.38488562816228156, -0.17309361739035278 ], ... ],
    	[ [ -0.1210600684809934, -0.9210294577280698, 0.4233177489663751 ], [ -1.1924344268538007, -0.8369376254692993, -1.445312504760232 ], [ 1.731144982590642, 1.021289444263159, -1.4668145503508767 ], [ -0.934637171875729, -1.3236834040584784, -1.1060375289020454 ], [ 0.2474754148304072, -1.4601316672105233, 0.2306799449839176 ], [ 0.9016244886023388, -0.14201908050438705, 0.38703176377777715 ], [ -1.1155701852456137, 1.8137515437134475, -0.29230904568567095 ], [ -0.47610399461403563, 1.385052518225143, 1.2815678791959064 ], ... ],
    	[ [ 1.5743679611418107, 1.1268145503508766, -1.0230271516783627 ], [ 1.4369728483216373, 0.4523685932485393, 0.8254823329795291 ], [ 0.24261411137865593, 0.19467072078508837, -1.8354448040774838 ], [ 0.6521958268728061, 0.20932927921491165, 0.8891825114926873 ], [ 0.055326973165204324, 1.1648994644605253, 1.5098647625263122 ], [ 0.8481744403187118, -1.6857024845628654, -0.6597307892660818 ], [ 1.7894563343260201, -0.6253645020672498, -0.5194141933245602 ], [ -0.4560918322587705, -1.352939299412278, 1.2355089637397665 ], ... ],
    	[ [ -0.15343788592836188, -0.5426891691827469, -0.5922356618245588 ], [ 0.932408428200292, 0.8456429369999972, -0.8274968013845017 ], [ 0.10842520265497169, 1.5579213719327447, -1.058193520823099 ], [ -1.5845522577660802, 0.9761559919210534, 0.8847434725394718 ], [ -1.6230317637777771, 0.12265394633040866, 1.0707457785891792 ], [ -0.5395701852456137, -0.5923472066944451, -1.76170709666228 ], [ -0.98593353428801, -0.8152963624122809, 0.8381767463684191 ], [ -1.0117446255643254, -0.380347206694445, 0.682575950369882 ], ... ],
    	[ [ 1.503366808116957, 0.24343557987865458, -0.6539335342880099 ], [ -1.5487862456476607, 0.8313194229093539, 0.94926344560965 ], [ -0.8772085101461964, 1.2633529718187133, -1.8974684966812854 ], [ 1.255862456476605, 1.531132820235377, -0.04934144157017667 ], [ 1.3767648590935662, 0.32518544964912327, 1.5562004389722206 ], [ -1.6961652161198826, 0.6180543033567251, 1.1833483597192986 ], [ 1.399692628257308, -0.4309093207660831, -1.4139029235350864 ], [ -1.190240906030702, -0.7138157033757304, -0.2336198765029242 ], ... ],
    	[ [ -0.7224199584488284, 1.56340664306871, -0.020051997307017816 ], [ 0.91713806444152, 0.9849652980657869, 1.7408382429546783 ], [ -1.0714829650862578, -1.2145239530628642, -1.6707030054809904 ], [ -0.7272582014035068, 1.6775112697894738, 1.4172512832543849 ], [ 1.5461339732602306, -0.7342333557748516, -1.395501413483916 ], [ -0.26693238126315616, 0.10036398114912465, 0.1515043516403521 ], [ -0.548816856400584, -0.8510271516783625, 1.6731472886403493 ], [ 0.3719098416842082, 0.7409468496681285, -1.5819595329415186 ], ... ],
    	[ [ -0.30758695970029337, 1.1598364578230962, -0.4297498697704687 ], [ 0.828890240261696, 1.8731426765409347, 1.6286654765789452 ], [ -0.4118226215248522, -1.8814684966812854, 0.718724392035085 ], [ -0.8818723127821627, 0.9956100201973664, -1.6655909396929796 ], [ 1.8555441865921047, -0.8530952913333314, -1.3544107342499991 ], [ -0.49046734365643174, 0.026201071078949312, 1.3187763893421027 ], [ 0.44428304703216204, 0.4104841181111114, -1.549508331633038 ], [ -0.4585621140716381, 1.4268191624502913, 1.273073904779237 ], ... ],
    	[ [ -0.22183708992982465, -1.6112888121564302, 1.3481132188128648 ], [ -1.3343419624883017, 0.5096597114546769, -0.5838041731271939 ], [ 1.5758532322777756, -0.4581796845248551, -0.18012999326754453 ], [ -0.1659364724444457, 0.6387839395979532, 1.0818677006827482 ], [ -0.1879312282383025, 1.2716435691067258, 0.03790753563450089 ], [ 0.524790857747075, -1.3562172134269006, 1.747701852456137 ], [ -1.49791675983333, 1.3225713382704674, -0.749038681926899 ], [ -1.1314141933245603, -0.9325124228143277, -0.39302023352924065 ], ... ],
    	...
    ]

TrainingTester.java:432 executed in 0.17 seconds (0.000 gc):

    return TestUtil.compare(title + " vs Iteration", runs);
Logging
Plotting range=[1.0, -29.597991743744966], [3.0, -0.6919208977394027]; valueStats=DoubleSummaryStatistics{count=6, sum=0.426513, min=0.000000, average=0.071085, max=0.203273}
Plotting 3 points for GD
Only 1 points for CjGD
Plotting 2 points for LBFGS

Returns

Result (plot omitted: objective value vs. iteration for GD, CjGD, and LBFGS)

TrainingTester.java:435 executed in 0.01 seconds (0.000 gc):

    return TestUtil.compareTime(title + " vs Time", runs);
Logging
Plotting range=[0.0, -29.597991743744966], [320.69, -0.6919208977394027]; valueStats=DoubleSummaryStatistics{count=6, sum=0.426513, min=0.000000, average=0.071085, max=0.203273}
Plotting 3 points for GD
Only 1 points for CjGD
Plotting 2 points for LBFGS

Returns

Result (plot omitted: objective value vs. time for GD, CjGD, and LBFGS)

Results

TrainingTester.java:255 executed in 0.00 seconds (0.000 gc):

    return grid(inputLearning, modelLearning, completeLearning);

Returns

Result (composite result grid omitted)

TrainingTester.java:258 executed in 0.00 seconds (0.000 gc):

    return new ComponentResult(null == inputLearning ? null : inputLearning.value,
        null == modelLearning ? null : modelLearning.value, null == completeLearning ? null : completeLearning.value);

Returns

    {"input":{ "LBFGS": { "type": "NonConverged", "value": 0.009857466474368064 }, "CjGD": { "type": "Converged", "value": 2.5235287461476087E-30 }, "GD": { "type": "NonConverged", "value": 2.523511417438223E-4 } }, "model":null, "complete":null}

LayerTests.java:425 executed in 0.00 seconds (0.000 gc):

    throwException(exceptions.addRef());

Results

details: {"input":{ "LBFGS": { "type": "NonConverged", "value": 0.009857466474368064 }, "CjGD": { "type": "Converged", "value": 2.5235287461476087E-30 }, "GD": { "type": "NonConverged", "value": 2.523511417438223E-4 } }, "model":null, "complete":null}
result: OK
  {
    "result": "OK",
    "performance": {
      "execution_time": "419.286",
      "gc_time": "13.168"
    },
    "created_on": 1586737997241,
    "file_name": "trainingTest",
    "report": {
      "simpleName": "Basic",
      "canonicalName": "com.simiacryptus.mindseye.layers.cudnn.ImgTileCycleLayerTest.Basic",
      "link": "https://github.com/SimiaCryptus/mindseye-cudnn/tree/59d5b3318556370acb2d83ee6ec123ce0fc6974f/src/test/java/com/simiacryptus/mindseye/layers/cudnn/ImgTileCycleLayerTest.java",
      "javaDoc": ""
    },
    "training_analysis": {
      "input": {
        "LBFGS": {
          "type": "NonConverged",
          "value": 0.009857466474368064
        },
        "CjGD": {
          "type": "Converged",
          "value": 2.5235287461476087E-30
        },
        "GD": {
          "type": "NonConverged",
          "value": 2.523511417438223E-4
        }
      }
    },
    "archive": "s3://code.simiacrypt.us/tests/com/simiacryptus/mindseye/layers/cudnn/ImgTileCycleLayer/Basic/trainingTest/202004133317",
    "id": "a1a82dda-1d53-414c-8265-4ae1ac968331",
    "report_type": "Components",
    "display_name": "Comparative Training",
    "target": {
      "simpleName": "ImgTileCycleLayer",
      "canonicalName": "com.simiacryptus.mindseye.layers.cudnn.ImgTileCycleLayer",
      "link": "https://github.com/SimiaCryptus/mindseye-cudnn/tree/59d5b3318556370acb2d83ee6ec123ce0fc6974f/src/main/java/com/simiacryptus/mindseye/layers/cudnn/ImgTileCycleLayer.java",
      "javaDoc": ""
    }
  }