1. Test Modules
  2. Training Characteristics
    1. Input Learning
      1. Gradient Descent
      2. Conjugate Gradient Descent
      3. Limited-Memory BFGS
    2. Results
  3. Results

Subreport: Logs for com.simiacryptus.ref.lang.ReferenceCountingBase

Test Modules

Using Seed 698245373088819200

Training Characteristics

Input Learning

In this test, we use a network to learn this target input, given its pre-evaluated output:

TrainingTester.java:332 executed in 0.01 seconds (0.000 gc):

    return RefArrays.stream(RefUtil.addRef(input_target)).flatMap(RefArrays::stream).map(x -> {
      try {
        return x.prettyPrint();
      } finally {
        x.freeRef();
      }
    }).reduce((a, b) -> a + "\n" + b).orElse("");

Returns

    [
    	[ [ 1.64, 1.612 ], [ -1.856, 1.524 ], [ 0.3, -1.492 ], [ 0.996, 0.048 ], [ 0.092, 1.556 ], [ 1.704, -0.712 ] ],
    	[ [ -1.572, -1.028 ], [ -1.688, -0.804 ], [ -0.628, 1.62 ], [ -0.384, 1.98 ], [ -0.068, -1.156 ], [ -0.556, 0.148 ] ],
    	[ [ -0.128, -1.476 ], [ -1.228, 0.048 ], [ -1.832, 0.184 ], [ -0.012, 0.496 ], [ 1.368, -0.464 ], [ -0.408, -0.876 ] ],
    	[ [ 0.08, 1.356 ], [ 1.652, -1.72 ], [ 0.788, -0.804 ], [ -1.616, -0.636 ], [ -1.516, 1.556 ], [ 1.108, 1.032 ] ],
    	[ [ -1.424, -0.768 ], [ -0.384, 1.552 ], [ 1.876, 1.912 ], [ -0.892, -0.176 ], [ -0.504, 0.028 ], [ -0.608, -0.032 ] ],
    	[ [ 0.636, -0.472 ], [ 1.512, -1.16 ], [ 1.048, 0.392 ], [ 1.208, 1.764 ], [ -0.852, 0.7 ], [ -0.368, -1.54 ] ]
    ]
    [
    	[ [ -1.476, 1.704 ], [ 1.64, -0.012 ], [ -1.424, 0.092 ], [ 0.184, 1.98 ], [ 1.032, -0.556 ], [ 0.08, 1.764 ] ],
    	[ [ -1.028, 0.148 ], [ 0.048, 0.7 ], [ 1.652, -0.032 ], [ 1.612, -0.628 ], [ 1.368, -1.156 ], [ -0.068, 1.556 ] ],
    	[ [ -1.572, -1.616 ], [ 0.996, -1.832 ], [ -1.228, -1.54 ], [ 1.208, 0.3 ], [ 1.512, -0.804 ], [ -0.368, 1.556 ] ],
    	[ [ -1.516, 0.788 ], [ -1.688, 0.392 ], [ 0.048, 1.552 ], [ -0.892, -0.712 ], [ 1.876, -1.492 ], [ 1.912, 0.636 ] ],
    	[ [ 0.028, 1.524 ], [ -1.72, -0.636 ], [ -0.852, -0.504 ], [ -0.176, 1.62 ], [ -0.876, -1.856 ], [ -1.16, -0.472 ] ],
    	[ [ -0.768, 1.356 ], [ -0.408, -0.804 ], [ 1.048, -0.128 ], [ -0.608, 1.108 ], [ -0.384, 0.496 ], [ -0.464, -0.384 ] ]
    ]
    [
    	[ [ 0.08, -0.384 ], [ 1.356, -1.228 ], [ -1.832, 1.368 ], [ 0.048, -0.504 ], [ -0.804, -1.16 ], [ -1.572, -1.516 ] ],
    	[ [ -1.856, -1.54 ], [ 1.032, -0.384 ], [ 1.552, 0.048 ], [ 1.704, -1.688 ], [ -1.72, 0.496 ], [ -1.476, -0.408 ] ],
    	[ [ 1.912, 1.62 ], [ 1.512, -0.876 ], [ 0.3, 1.524 ], [ -1.616, -0.636 ], [ -0.472, -0.368 ], [ 0.092, 1.556 ] ],
    	[ [ -0.892, 1.612 ], [ -0.176, -0.628 ], [ 1.208, 1.98 ], [ -0.556, -1.492 ], [ 0.788, -1.156 ], [ 1.652, 0.184 ] ],
    	[ [ -0.012, 0.636 ], [ -1.028, -0.068 ], [ 0.996, -0.804 ], [ 1.048, -0.032 ], [ 1.876, -0.608 ], [ 0.392, 0.028 ] ],
    	[ [ 0.7, 0.148 ], [ -0.712, 1.764 ], [ -0.768, -1.424 ], [ 1.64, 1.108 ], [ -0.464, -0.128 ], [ 1.556, -0.852 ] ]
    ]
    [
    	[ [ 0.7, -1.616 ], [ -0.804, 0.048 ], [ -1.028, -1.228 ], [ -0.804, -1.492 ], [ 1.556, 0.184 ], [ 0.3, -0.608 ] ],
    	[ [ -0.852, 0.496 ], [ 0.092, -0.032 ], [ -0.636, 1.62 ], [ -0.504, -0.464 ], [ -0.892, 1.512 ], [ -0.472, -1.424 ] ],
    	[ [ 1.876, 0.08 ], [ -0.768, -1.54 ], [ 1.048, -0.712 ], [ 1.208, -1.856 ], [ -0.628, 0.636 ], [ 1.556, -1.688 ] ],
    	[ [ 0.048, -0.876 ], [ 1.764, -1.16 ], [ 1.612, 0.028 ], [ -0.068, 1.108 ], [ 0.392, 1.64 ], [ -0.176, -0.384 ] ],
    	[ [ -0.368, -0.556 ], [ 1.652, -1.572 ], [ -1.516, -1.832 ], [ 1.524, -0.384 ], [ 0.996, 0.148 ], [ -1.156, 1.912 ] ],
    	[ [ -0.012, -0.128 ], [ 1.552, 1.356 ], [ 0.788, -1.476 ], [ 1.032, 1.704 ], [ -1.72, -0.408 ], [ 1.368, 1.98 ] ]
    ]
    [
    	[ [ -0.608, -1.028 ], [ 0.092, 0.636 ], [ -1.832, -0.556 ], [ -0.804, -1.72 ], [ 0.788, 0.184 ], [ 1.704, 0.996 ] ],
    	[ [ -0.852, 1.64 ], [ 1.612, -0.032 ], [ 0.08, -1.156 ], [ -0.368, 0.392 ], [ -1.572, -1.616 ], [ -1.856, -0.876 ] ],
    	[ [ -1.688, -0.128 ], [ 1.652, 1.98 ], [ -0.472, 0.496 ], [ 0.028, 0.3 ], [ -1.516, -0.628 ], [ -0.176, -1.492 ] ],
    	[ [ 1.764, 1.556 ], [ 1.512, 1.356 ], [ 1.556, 0.7 ], [ -0.636, -0.012 ], [ 1.208, -0.384 ], [ -0.768, 1.552 ] ],
    	[ [ -1.16, -0.892 ], [ 1.108, 1.62 ], [ 0.148, 1.048 ], [ -1.476, 0.048 ], [ 1.032, -0.464 ], [ -0.712, 1.912 ] ],
    	[ [ -0.504, 1.368 ], [ 1.524, -1.424 ], [ -1.54, -0.804 ], [ -0.408, 0.048 ], [ -0.384, 1.876 ], [ -1.228, -0.068 ] ]
    ]

Gradient Descent

First, we train using the basic gradient descent method with weak line search conditions.

TrainingTester.java:480 executed in 0.53 seconds (0.000 gc):

    IterativeTrainer iterativeTrainer = new IterativeTrainer(trainable.addRef());
    try {
      iterativeTrainer.setLineSearchFactory(label -> new ArmijoWolfeSearch());
      iterativeTrainer.setOrientation(new GradientDescent());
      iterativeTrainer.setMonitor(TrainingTester.getMonitor(history));
      iterativeTrainer.setTimeout(30, TimeUnit.SECONDS);
      iterativeTrainer.setMaxIterations(250);
      iterativeTrainer.setTerminateThreshold(0);
      return iterativeTrainer.run();
    } finally {
      iterativeTrainer.freeRef();
    }
Logging
Reset training subject: 4965393296681
Reset training subject: 4965412938391
Constructing line search parameters: GD
th(0)=540.0397938961208;dx=-1.9024748288122544E26
New Minimum: 540.0397938961208 > 0.0
Armijo: th(2.154434690031884)=0.0; dx=-3.470903878411495E14 evalInputDelta=540.0397938961208
Armijo: th(1.077217345015942)=0.0; dx=-3.4709038784114944E14 evalInputDelta=540.0397938961208
Armijo: th(0.3590724483386473)=0.0; dx=-3.4709038784114944E14 evalInputDelta=540.0397938961208
Armijo: th(0.08976811208466183)=0.0; dx=-3.470903878411495E14 evalInputDelta=540.0397938961208
Armijo: th(0.017953622416932366)=0.0; dx=-3.4709038784114944E14 evalInputDelta=540.0397938961208
Armijo: th(0.002992270402822061)=0.0; dx=-3.4709038784114956E14 evalInputDelta=540.0397938961208
Armijo: th(4.2746720040315154E-4)=0.0; dx=-3.470903878411495E14 evalInputDelta=540.0397938961208
Armijo: th(5.343340005039394E-5)=0.0; dx=-3.470903878411495E14 evalInputDelta=540.0397938961208
Armijo: th(5.9370444500437714E-6)=0.0; dx=-3.470903878411495E14 evalInputDelta=540.0397938961208
Armijo: th(5.937044450043771E-7)=0.0; dx=-3.470903878411495E14 evalInputDelta=540.0397938961208
Armijo: th(5.397313136403428E-8)=0.0; dx=-3.470903878411495E14 evalInputDelta=540.0397938961208
Armijo: th(4.4977609470028565E-9)=0.0; dx=-3.470903878411495E14 evalInputDelta=540.0397938961208
Armijo: th(3.4598161130791205E-10)=0.0; dx=-3.4709038784114944E14 evalInputDelta=540.0397938961208
Armijo: th(2.4712972236279432E-11)=0.0; dx=-3.470903878411495E14 evalInputDelta=540.0397938961208
Armijo: th(1.6475314824186289E-12)=0.0; dx=-3.470903878411495E14 evalInputDelta=540.0397938961208
Armijo: th(1.029707176511643E-13)=258.54442882610743; dx=-8.907855872080054E25 evalInputDelta=281.4953650700134
Armijo: th(6.057101038303783E-15)=528.9641078099528; dx=-1.8667133696107457E26 evalInputDelta=11.075686086168048
MIN ALPHA (3.3650561323909904E-16): th(2.154434690031884)=0.0
Fitness changed from 540.0397938961208 to 0.0
Iteration 1 complete. Error: 0.0 Total: 0.4763; Orientation: 0.0033; Line Search: 0.4423
th(0)=0.0;dx=-642.33605888
Armijo: th(2.154434690031884E-15)=0.0; dx=-642.3360588800001 evalInputDelta=0.0
Armijo: th(1.077217345015942E-15)=0.0; dx=-642.3360588800001 evalInputDelta=0.0
MIN ALPHA (3.5907244833864734E-16): th(0.0)=0.0
Fitness changed from 0.0 to 0.0
Static Iteration Total: 0.0418; Orientation: 0.0020; Line Search: 0.0327
Iteration 2 failed. Error: 0.0
Previous Error: 0.0 -> 0.0
Optimization terminated 2
Final threshold in iteration 2: 0.0 (> 0.0) after 0.518s (< 30.000s)

Returns

    0.0

Training Converged

Conjugate Gradient Descent

Next, we use a conjugate gradient descent method, which converges the fastest for purely linear functions.

TrainingTester.java:452 executed in 0.34 seconds (0.000 gc):

    IterativeTrainer iterativeTrainer = new IterativeTrainer(trainable.addRef());
    try {
      iterativeTrainer.setLineSearchFactory(label -> new QuadraticSearch());
      iterativeTrainer.setOrientation(new GradientDescent());
      iterativeTrainer.setMonitor(TrainingTester.getMonitor(history));
      iterativeTrainer.setTimeout(30, TimeUnit.SECONDS);
      iterativeTrainer.setMaxIterations(250);
      iterativeTrainer.setTerminateThreshold(0);
      return iterativeTrainer.run();
    } finally {
      iterativeTrainer.freeRef();
    }
Logging
Reset training subject: 4965918289785
Reset training subject: 4965924388607
Constructing line search parameters: GD
F(0.0) = LineSearchPoint{point=PointSample{avg=540.0397938961208}, derivative=-1.9024748288122544E26}
New Minimum: 540.0397938961208 > 0.0
F(1.0E-10) = LineSearchPoint{point=PointSample{avg=0.0}, derivative=-3.470903878411495E14}, evalInputDelta = -540.0397938961208
F(7.000000000000001E-10) = LineSearchPoint{point=PointSample{avg=0.0}, derivative=-3.470903878411495E14}, evalInputDelta = -540.0397938961208
F(4.900000000000001E-9) = LineSearchPoint{point=PointSample{avg=0.0}, derivative=-3.470903878411495E14}, evalInputDelta = -540.0397938961208
F(3.430000000000001E-8) = LineSearchPoint{point=PointSample{avg=0.0}, derivative=-3.470903878411495E14}, evalInputDelta = -540.0397938961208
F(2.4010000000000004E-7) = LineSearchPoint{point=PointSample{avg=0.0}, derivative=-3.470903878411495E14}, evalInputDelta = -540.0397938961208
F(1.6807000000000003E-6) = LineSearchPoint{point=PointSample{avg=0.0}, derivative=-3.470903878411495E14}, evalInputDelta = -540.0397938961208
F(1.1764900000000001E-5) = LineSearchPoint{point=PointSample{avg=0.0}, derivative=-3.470903878411495E14}, evalInputDelta = -540.0397938961208
F(8.235430000000001E-5) = LineSearchPoint{point=PointSample{avg=0.0}, derivative=-3.470903878411495E14}, evalInputDelta = -540.0397938961208
F(5.764801000000001E-4) = LineSearchPoint{point=PointSample{avg=0.0}, derivative=-3.470903878411495E14}, evalInputDelta = -540.0397938961208
F(0.004035360700000001) = LineSearchPoint{point=PointSample{avg=0.0}, derivative=-3.470903878411495E14}, evalInputDelta = -540.0397938961208
F(0.028247524900000005) = LineSearchPoint{point=PointSample{avg=0.0}, derivative=-3.470903878411495E14}, evalInputDelta = -540.0397938961208
F(0.19773267430000002) = LineSearchPoint{point=PointSample{avg=0.0}, derivative=-3.4709038784114944E14}, evalInputDelta = -540.0397938961208
F(1.3841287201) = LineSearchPoint{point=PointSample{avg=0.0}, derivative=-3.470903878411495E14}, evalInputDelta = -540.0397938961208
F(9.688901040700001) = LineSearchPoint{point=PointSample{avg=0.0}, derivative=-3.470903878411495E14}, evalInputDelta = -540.0397938961208
F(67.8223072849) = LineSearchPoint{point=PointSample{avg=0.0}, derivative=-3.4709038784114944E14}, evalInputDelta = -540.0397938961208
F(474.7561509943) = LineSearchPoint{point=PointSample{avg=0.0}, derivative=-3.470903878411495E14}, evalInputDelta = -540.0397938961208
F(3323.2930569601003) = LineSearchPoint{point=PointSample{avg=0.0}, derivative=-3.470903878411495E14}, evalInputDelta = -540.0397938961208
F(23263.0513987207) = LineSearchPoint{point=PointSample{avg=0.0}, derivative=-3.4709038784114944E14}, evalInputDelta = -540.0397938961208
F(162841.3597910449) = LineSearchPoint{point=PointSample{avg=0.0}, derivative=-3.4709038784114944E14}, evalInputDelta = -540.0397938961208
F(1139889.5185373144) = LineSearchPoint{point=PointSample{avg=0.0}, derivative=-3.470903878411495E14}, evalInputDelta = -540.0397938961208
F(7979226.6297612) = LineSearchPoint{point=PointSample{avg=0.0}, derivative=-3.4709038784114944E14}, evalInputDelta = -540.0397938961208
F(5.58545864083284E7) = LineSearchPoint{point=PointSample{avg=0.0}, derivative=-3.470903878411495E14}, evalInputDelta = -540.0397938961208
F(3.909821048582988E8) = LineSearchPoint{point=PointSample{avg=0.0}, derivative=-3.470903878411495E14}, evalInputDelta = -540.0397938961208
F(2.7368747340080914E9) = LineSearchPoint{point=PointSample{avg=0.0}, derivative=-3.470903878411495E14}, evalInputDelta = -540.0397938961208
F(1.915812313805664E10) = LineSearchPoint{point=PointSample{avg=0.0}, derivative=-3.470903878411495E14}, evalInputDelta = -540.0397938961208
0.0 <= 540.0397938961208
F(1.0E10) = LineSearchPoint{point=PointSample{avg=0.0}, derivative=-3.4709038784114956E14}, evalInputDelta = -540.0397938961208
Right bracket at 1.0E10
Converged to right
Fitness changed from 540.0397938961208 to 0.0
Iteration 1 complete. Error: 0.0 Total: 0.3207; Orientation: 0.0022; Line Search: 0.3010
F(0.0) = LineSearchPoint{point=PointSample{avg=0.0}, derivative=-642.3360588800002}
F(1.0E10) = LineSearchPoint{point=PointSample{avg=0.0}, derivative=-642.3360588800001}, evalInputDelta = 0.0
0.0 <= 0.0
Converged to right
Fitness changed from 0.0 to 0.0
Static Iteration Total: 0.0186; Orientation: 0.0011; Line Search: 0.0129
Iteration 2 failed. Error: 0.0
Previous Error: 0.0 -> 0.0
Optimization terminated 2
Final threshold in iteration 2: 0.0 (> 0.0) after 0.340s (< 30.000s)

Returns

    0.0

Training Converged

Limited-Memory BFGS

Next, we apply the same optimization using L-BFGS, which is nearly ideal for purely second-order or quadratic functions.

TrainingTester.java:509 executed in 0.83 seconds (0.000 gc):

    IterativeTrainer iterativeTrainer = new IterativeTrainer(trainable.addRef());
    try {
      iterativeTrainer.setLineSearchFactory(label -> new ArmijoWolfeSearch());
      iterativeTrainer.setOrientation(new LBFGS());
      iterativeTrainer.setMonitor(TrainingTester.getMonitor(history));
      iterativeTrainer.setTimeout(30, TimeUnit.SECONDS);
      iterativeTrainer.setIterationsPerSample(100);
      iterativeTrainer.setMaxIterations(250);
      iterativeTrainer.setTerminateThreshold(0);
      return iterativeTrainer.run();
    } finally {
      iterativeTrainer.freeRef();
    }
Logging
Reset training subject: 4966263437796
Reset training subject: 4966268214756
Adding measurement 299f5e80 to history. Total: 0
LBFGS Accumulation History: 1 points
Constructing line search parameters: GD
Non-optimal measurement 540.0397938961208 < 540.0397938961208. Total: 1
th(0)=540.0397938961208;dx=-1.9024748288122538E26
Adding measurement 58dce0bf to history. Total: 1
New Minimum: 540.0397938961208 > 0.0
Armijo: th(2.154434690031884)=0.0; dx=-3.4709038784114944E14 evalInputDelta=540.0397938961208
Non-optimal measurement 0.0 < 0.0. Total: 2
Armijo: th(1.077217345015942)=0.0; dx=-3.4709038784114944E14 evalInputDelta=540.0397938961208
Non-optimal measurement 0.0 < 0.0. Total: 2
Armijo: th(0.3590724483386473)=0.0; dx=-3.4709038784114944E14 evalInputDelta=540.0397938961208
Non-optimal measurement 0.0 < 0.0. Total: 2
Armijo: th(0.08976811208466183)=0.0; dx=-3.4709038784114944E14 evalInputDelta=540.0397938961208
Non-optimal measurement 0.0 < 0.0. Total: 2
Armijo: th(0.017953622416932366)=0.0; dx=-3.4709038784114944E14 evalInputDelta=540.0397938961208
Non-optimal measurement 0.0 < 0.0. Total: 2
Armijo: th(0.002992270402822061)=0.0; dx=-3.4709038784114944E14 evalInputDelta=540.0397938961208
Non-optimal measurement 0.0 < 0.0. Total: 2
Armijo: th(4.2746720040315154E-4)=0.0; dx=-3.4709038784114944E14 evalInputDelta=540.0397938961208
Non-optimal measurement 0.0 < 0.0. Total: 2
Armijo: th(5.343340005039394E-5)=0.0; dx=-3.4709038784114944E14 evalInputDelta=540.0397938961208
Non-optimal measurement 0.0 < 0.0. Total: 2
Armijo: th(5.9370444500437714E-6)=0.0; dx=-3.4709038784114944E14 evalInputDelta=540.0397938961208
Non-optimal measurement 0.0 < 0.0. Total: 2
Armijo: th(5.937044450043771E-7)=0.0; dx=-3.4709038784114944E14 evalInputDelta=540.0397938961208
Non-optimal measurement 0.0 < 0.0. Total: 2
Armijo: th(5.397313136403428E-8)=0.0; dx=-3.4709038784114944E14 evalInputDelta=540.0397938961208
Non-optimal measurement 0.0 < 0.0. Total: 2
Armijo: th(4.4977609470028565E-9)=0.0; dx=-3.4709038784114944E14 evalInputDelta=540.0397938961208
Non-optimal measurement 0.0 < 0.0. Total: 2
Armijo: th(3.4598161130791205E-10)=0.0; dx=-3.4709038784114944E14 evalInputDelta=540.0397938961208
Non-optimal measurement 0.0 < 0.0. Total: 2
Armijo: th(2.4712972236279432E-11)=0.0; dx=-3.4709038784114944E14 evalInputDelta=540.0397938961208
Non-optimal measurement 0.0 < 0.0. Total: 2
Armijo: th(1.6475314824186289E-12)=0.0; dx=-3.4709038784114944E14 evalInputDelta=540.0397938961208
Non-optimal measurement 258.54442882610743 < 0.0. Total: 2
Armijo: th(1.029707176511643E-13)=258.54442882610743; dx=-8.907855872080054E25 evalInputDelta=281.4953650700134
Non-optimal measurement 528.9641078099528 < 0.0. Total: 2
Armijo: th(6.057101038303783E-15)=528.9641078099528; dx=-1.8667133696107454E26 evalInputDelta=11.075686086168048
Non-optimal measurement 0.0 < 0.0. Total: 2
MIN ALPHA (3.3650561323909904E-16): th(2.154434690031884)=0.0
Fitness changed from 540.0397938961208 to 0.0
Iteration 1 complete. Error: 0.0 Total: 0.7927; Orientation: 0.0998; Line Search: 0.6793
Non-optimal measurement 0.0 < 0.0. Total: 2
LBFGS Accumulation History: 2 points
Non-optimal measurement 0.0 < 0.0. Total: 2
th(0)=0.0;dx=-642.3360588800002
Non-optimal measurement 0.0 < 0.0. Total: 2
Armijo: th(2.154434690031884E-15)=0.0; dx=-642.3360588800002 evalInputDelta=0.0
Non-optimal measurement 0.0 < 0.0. Total: 2
Armijo: th(1.077217345015942E-15)=0.0; dx=-642.3360588800002 evalInputDelta=0.0
Non-optimal measurement 0.0 < 0.0. Total: 2
MIN ALPHA (3.5907244833864734E-16): th(0.0)=0.0
Fitness changed from 0.0 to 0.0
Static Iteration Total: 0.0335; Orientation: 0.0028; Line Search: 0.0259
Iteration 2 failed. Error: 0.0
Previous Error: 0.0 -> 0.0
Optimization terminated 2
Final threshold in iteration 2: 0.0 (> 0.0) after 0.826s (< 30.000s)

Returns

    0.0

Training Converged

TrainingTester.java:432 executed in 0.11 seconds (0.000 gc):

    return TestUtil.compare(title + " vs Iteration", runs);
Logging
Plotting range=[0.0, 0.0], [2.0, 1.0]; valueStats=DoubleSummaryStatistics{count=0, sum=0.000000, min=Infinity, average=0.000000, max=-Infinity}
Only 0 points for GD
Only 0 points for CjGD
Only 0 points for LBFGS

Returns

Result

TrainingTester.java:435 executed in 0.00 seconds (0.000 gc):

    return TestUtil.compareTime(title + " vs Time", runs);
Logging
No Data

Results

TrainingTester.java:255 executed in 0.00 seconds (0.000 gc):

    return grid(inputLearning, modelLearning, completeLearning);

Returns

Result

TrainingTester.java:258 executed in 0.00 seconds (0.000 gc):

    return new ComponentResult(null == inputLearning ? null : inputLearning.value,
        null == modelLearning ? null : modelLearning.value, null == completeLearning ? null : completeLearning.value);

Returns

    {"input":{ "LBFGS": { "type": "Converged", "value": 0.0 }, "CjGD": { "type": "Converged", "value": 0.0 }, "GD": { "type": "Converged", "value": 0.0 } }, "model":null, "complete":null}

LayerTests.java:425 executed in 0.00 seconds (0.000 gc):

    throwException(exceptions.addRef());

Results

Details / Result
{"input":{ "LBFGS": { "type": "Converged", "value": 0.0 }, "CjGD": { "type": "Converged", "value": 0.0 }, "GD": { "type": "Converged", "value": 0.0 } }, "model":null, "complete":null}OK
  {
    "result": "OK",
    "performance": {
      "execution_time": "2.302",
      "gc_time": "0.260"
    },
    "created_on": 1586739553197,
    "file_name": "trainingTest",
    "report": {
      "simpleName": "Basic",
      "canonicalName": "com.simiacryptus.mindseye.layers.java.RescaledSubnetLayerTest.Basic",
      "link": "https://github.com/SimiaCryptus/mindseye-java/tree/93db34cedee48c0202777a2b25deddf1dfaf5731/src/test/java/com/simiacryptus/mindseye/layers/java/RescaledSubnetLayerTest.java",
      "javaDoc": ""
    },
    "training_analysis": {
      "input": {
        "LBFGS": {
          "type": "Converged",
          "value": 0.0
        },
        "CjGD": {
          "type": "Converged",
          "value": 0.0
        },
        "GD": {
          "type": "Converged",
          "value": 0.0
        }
      }
    },
    "archive": "s3://code.simiacrypt.us/tests/com/simiacryptus/mindseye/layers/java/RescaledSubnetLayer/Basic/trainingTest/202004135913",
    "id": "ac456f3a-590b-44af-bb3b-9a89555afbc9",
    "report_type": "Components",
    "display_name": "Comparative Training",
    "target": {
      "simpleName": "RescaledSubnetLayer",
      "canonicalName": "com.simiacryptus.mindseye.layers.java.RescaledSubnetLayer",
      "link": "https://github.com/SimiaCryptus/mindseye-java/tree/93db34cedee48c0202777a2b25deddf1dfaf5731/src/main/java/com/simiacryptus/mindseye/layers/java/RescaledSubnetLayer.java",
      "javaDoc": ""
    }
  }