1. Test Modules
  2. Training Characteristics
    1. Input Learning
      1. Gradient Descent
      2. Conjugate Gradient Descent
      3. Limited-Memory BFGS
    2. Results
  3. Results

Subreport: Logs for com.simiacryptus.ref.lang.ReferenceCountingBase

Test Modules

Using Seed 6918460655378298880

Training Characteristics

Input Learning

In this test, we use a network to learn this target input, given its pre-evaluated output:

TrainingTester.java:332 executed in 0.04 seconds (0.000 gc):

    return RefArrays.stream(RefUtil.addRef(input_target)).flatMap(RefArrays::stream).map(x -> {
      try {
        return x.prettyPrint();
      } finally {
        x.freeRef();
      }
    }).reduce((a, b) -> a + "\n" + b).orElse("");

Returns

    [
    	[ [ -0.136, 0.332, 1.8 ], [ -1.656, 1.08, -0.164 ], [ 0.68, 1.836, -0.828 ], [ -0.064, 0.976, -0.888 ], [ 1.94, 1.956, 0.464 ], [ 0.436, 1.6, -0.388 ], [ -0.936, 0.14, 0.716 ], [ -0.6, -1.504, 1.204 ], ... ],
    	[ [ -0.876, 1.7, -0.028 ], [ -0.26, -1.356, -0.636 ], [ -1.204, -1.912, -1.44 ], [ -0.54, -1.756, 0.232 ], [ 0.772, -1.436, -1.684 ], [ 0.836, -1.296, 0.936 ], [ 0.852, -0.16, -1.44 ], [ -0.192, 1.056, -0.596 ], ... ],
    	[ [ 0.072, 1.604, -1.436 ], [ 1.524, -1.728, 0.032 ], [ -1.336, -1.584, -1.22 ], [ 0.216, -1.884, -1.496 ], [ -1.5, -0.532, 0.868 ], [ 1.708, 1.436, 0.86 ], [ 1.976, 1.404, -1.38 ], [ -1.74, -1.976, -1.604 ], ... ],
    	[ [ 0.064, -1.856, -1.324 ], [ 1.084, 1.272, 1.376 ], [ 1.632, -0.816, 1.044 ], [ -0.608, -1.144, 1.816 ], [ 0.076, 0.884, -1.66 ], [ 1.692, -0.16, 1.216 ], [ -1.472, -0.228, -0.508 ], [ 1.068, -0.2, 0.74 ], ... ],
    	[ [ -1.828, -0.94, 0.38 ], [ 1.66, 1.4, -1.272 ], [ 0.892, 0.332, 0.652 ], [ 1.604, -0.888, -1.14 ], [ 1.032, -1.608, -0.116 ], [ 0.804, -1.392, -1.36 ], [ -1.852, -0.708, -0.848 ], [ -0.404, 0.332, -1.58 ], ... ],
    	[ [ 0.988, 1.248, -1.428 ], [ 0.852, -0.764, -0.712 ], [ -0.188, -0.26, 1.416 ], [ -0.664, 1.248, -1.496 ], [ -0.524, -1.008, 0.468 ], [ 0.18, 1.288, -0.216 ], [ -1.852, 0.556, 1.832 ], [ 1.572, -0.932, -0.8 ], ... ],
    	[ [ -1.076, 1.18, -0.316 ], [ 1.484, 0.408, -1.38 ], [ 1.836, 0.812, 0.98 ], [ 0.212, 1.828, -1.664 ], [ 0.812, 1.828, 0.068 ], [ -0.6, 0.864, 0.608 ], [ -0.472, 1.384, -0.436 ], [ 0.66, 1.864, 1.928 ], ... ],
    	[ [ 1.392, -1.812, 1.488 ], [ -1.908, 0.488, 1.92 ], [ -1.652, -0.528, -1.06 ], [ -1.116, -0.12, -0.948 ], [ -1.004, 0.38, -0.692 ], [ 1.092, -1.152, 0.012 ], [ 0.0, 1.032, 0.872 ], [ 0.328, -0.012, 0.476 ], ... ],
    	...
    ]
    [
    	[ [ -1.46, -0.38, 0.612 ], [ -1.528, 0.224, 0.208 ], [ 1.032, -0.556, -1.98 ], [ 1.528, 0.312, 1.348 ], [ 0.516, -1.54, 0.636 ], [ 0.656, -0.656, 0.64 ], [ 1.16, -0.868, 1.888 ], [ -0.868, -0.504, -0.936 ], ... ],
    	[ [ 0.28, -1.78, -0.528 ], [ -1.608, -0.308, 0.664 ], [ 0.248, 0.12, 0.248 ], [ -0.464, 0.988, 0.288 ], [ -1.26, 1.88, 0.412 ], [ 1.296, 1.884, -1.916 ], [ -0.052, 1.44, -0.48 ], [ 0.428, 0.132, 0.332 ], ... ],
    	[ [ 0.676, -1.092, -0.152 ], [ 0.768, -1.7, 1.592 ], [ 0.612, -1.44, 1.736 ], [ -1.36, -1.772, -1.444 ], [ -1.792, 1.236, 0.06 ], [ -0.54, 0.796, -1.016 ], [ -1.052, 0.44, 1.804 ], [ -1.296, 1.668, 0.236 ], ... ],
    	[ [ 1.832, 0.28, 1.364 ], [ -0.096, 0.468, -1.908 ], [ 1.672, -0.44, 0.992 ], [ -1.408, -1.764, 0.2 ], [ -0.2, 0.092, 0.168 ], [ 1.932, -1.608, -0.132 ], [ 1.264, -1.592, -0.92 ], [ 1.476, -0.876, 0.836 ], ... ],
    	[ [ -0.288, -1.996, -0.412 ], [ 1.264, 1.92, -1.32 ], [ -1.892, -0.676, -1.864 ], [ 0.36, -0.116, -0.168 ], [ -0.62, 0.084, 1.796 ], [ 1.508, -1.116, 1.544 ], [ -1.52, -0.592, -1.8 ], [ -0.272, -1.5, 0.48 ], ... ],
    	[ [ -1.072, 1.204, 0.516 ], [ 0.772, 1.488, 1.312 ], [ -1.768, -0.748, 1.444 ], [ -0.792, -1.432, -0.788 ], [ -1.616, 1.504, -1.644 ], [ 1.156, 0.656, -1.408 ], [ 1.544, 1.84, -0.368 ], [ 1.232, 0.804, -0.416 ], ... ],
    	[ [ 1.116, 0.456, -0.932 ], [ -0.84, 1.384, -0.708 ], [ 0.524, 0.968, -1.968 ], [ -0.06, -1.34, -0.556 ], [ 1.952, 1.824, 0.908 ], [ -1.936, -0.36, -1.776 ], [ 0.052, -0.98, -1.656 ], [ 0.812, 0.924, 1.728 ], ... ],
    	[ [ 1.376, 1.42, -0.556 ], [ 1.236, 0.528, 0.66 ], [ 1.892, -1.148, -1.66 ], [ -1.064, -1.12, -1.368 ], [ -0.332, -1.884, -0.44 ], [ -0.128, 1.28, 0.204 ], [ 1.476, 1.876, 1.872 ], [ -1.86, -1.284, -1.488 ], ... ],
    	...
    ]
    [
    	[ [ 1.288, -1.488, -0.536 ], [ -0.52, 0.64, -0.144 ], [ -0.128, 1.768, -0.556 ], [ 0.856, -1.668, -0.944 ], [ 0.456, -1.552, 0.368 ], [ 0.792, 0.124, -0.776 ], [ 0.372, -0.552, -1.48 ], [ 0.04, 0.464, 1.776 ], ... ],
    	[ [ -1.548, -1.864, 1.536 ], [ -0.804, -1.02, -0.852 ], [ 0.028, -1.584, 0.864 ], [ -0.86, 1.688, 0.892 ], [ -0.54, 0.9, -0.952 ], [ 1.384, -1.52, -1.696 ], [ -1.932, 1.976, -1.872 ], [ 1.128, 0.496, 1.908 ], ... ],
    	[ [ -1.448, -1.36, -1.172 ], [ -1.78, 0.448, -0.908 ], [ -0.94, 0.124, 0.616 ], [ 1.408, -0.62, 0.2 ], [ 1.02, -0.632, 0.668 ], [ -1.692, -1.656, 0.98 ], [ 1.872, 1.84, 1.168 ], [ -1.644, 1.672, 1.212 ], ... ],
    	[ [ 1.012, -0.196, -1.824 ], [ 1.12, 0.644, 0.172 ], [ -0.792, 0.124, -0.532 ], [ -0.764, -0.924, -0.148 ], [ 0.368, -0.884, -1.612 ], [ 0.888, -0.412, 1.648 ], [ -1.248, -1.096, -0.428 ], [ 0.112, -0.348, 1.168 ], ... ],
    	[ [ -1.54, -1.78, -0.736 ], [ 1.336, -1.356, -0.28 ], [ 1.692, -0.496, 1.652 ], [ 0.776, -1.776, 1.844 ], [ -0.604, 1.78, -1.588 ], [ -0.368, -1.684, -0.816 ], [ -0.28, 0.42, -0.268 ], [ -1.332, -1.076, -1.836 ], ... ],
    	[ [ 0.884, 1.052, 1.996 ], [ 1.032, -1.128, -0.86 ], [ -1.404, -0.756, 1.148 ], [ -1.556, 1.292, 0.744 ], [ 0.336, -1.984, 0.564 ], [ 0.364, 0.628, -0.704 ], [ -0.604, -0.244, -0.128 ], [ 0.636, 0.336, 1.276 ], ... ],
    	[ [ 1.272, -1.12, -1.456 ], [ 0.164, 0.608, 0.852 ], [ -0.44, 0.788, -1.38 ], [ 1.356, 1.536, -0.308 ], [ 0.268, -0.572, -0.6 ], [ -1.636, 0.86, 0.588 ], [ 1.464, 1.612, -1.604 ], [ -1.452, -1.68, 0.904 ], ... ],
    	[ [ -1.108, -1.448, 0.432 ], [ 0.08, 1.1, -0.008 ], [ -1.756, 1.404, 1.228 ], [ 1.544, 0.52, -1.096 ], [ -1.876, 0.924, -0.62 ], [ -0.868, -0.12, 0.428 ], [ -0.796, 1.316, 0.012 ], [ -0.8, -0.344, -1.552 ], ... ],
    	...
    ]
    [
    	[ [ 0.068, -1.436, 0.896 ], [ 1.212, 1.22, 1.756 ], [ -1.208, 0.468, -0.992 ], [ 1.232, -0.004, -0.864 ], [ 0.904, -0.232, 0.76 ], [ 0.528, -1.74, 0.7 ], [ -1.696, -0.376, -0.888 ], [ 1.004, 1.58, 0.192 ], ... ],
    	[ [ 0.132, -0.712, 1.928 ], [ 0.136, -1.556, 0.5 ], [ 0.46, 1.616, -1.628 ], [ -1.056, -0.932, 1.212 ], [ -0.172, -1.236, -1.88 ], [ -0.944, -0.168, -1.648 ], [ 0.048, 1.788, 0.016 ], [ -1.304, -0.268, -0.268 ], ... ],
    	[ [ 0.524, 0.464, 1.276 ], [ 0.84, -0.336, 1.6 ], [ 1.452, -1.888, -1.172 ], [ -0.524, 0.896, 0.712 ], [ 0.076, -0.4, 1.708 ], [ -1.236, -1.7, 0.876 ], [ 1.436, -1.516, -0.7 ], [ 1.028, -0.168, -0.728 ], ... ],
    	[ [ 0.72, 1.276, 0.98 ], [ 1.144, -0.648, 0.588 ], [ 0.752, -1.356, 1.296 ], [ -1.592, -0.108, 0.796 ], [ 1.988, 1.18, -0.448 ], [ -1.136, -0.664, 1.668 ], [ 1.768, -1.84, -0.004 ], [ 1.5, 1.492, 0.552 ], ... ],
    	[ [ -1.484, -1.412, -1.288 ], [ 0.928, 1.152, -1.864 ], [ 0.56, 0.688, -1.504 ], [ -0.252, 0.908, 1.22 ], [ 1.728, -1.252, -1.536 ], [ 0.608, 1.324, 1.9 ], [ -1.872, -1.344, 0.396 ], [ -1.456, 0.012, -0.352 ], ... ],
    	[ [ -1.736, 1.476, 1.492 ], [ 1.74, -1.104, -0.232 ], [ 1.156, 0.836, 1.904 ], [ 1.544, 1.2, 0.924 ], [ -0.94, -1.596, -0.928 ], [ 0.5, 0.18, -1.944 ], [ -0.444, -1.944, -1.94 ], [ 0.38, -1.152, -1.16 ], ... ],
    	[ [ 1.388, -1.88, -0.448 ], [ 1.26, 1.504, 1.912 ], [ 0.472, 0.944, -0.408 ], [ 0.924, 1.528, 1.16 ], [ 1.476, 0.188, 0.904 ], [ -0.252, 0.548, -1.564 ], [ -1.612, 1.808, -1.848 ], [ 0.04, -1.124, -0.272 ], ... ],
    	[ [ 1.716, -0.128, -0.596 ], [ 0.344, 0.608, 0.996 ], [ -0.408, 0.412, -1.236 ], [ -1.924, -0.564, 0.744 ], [ 1.172, -0.048, -1.892 ], [ 1.592, 0.24, -1.352 ], [ 0.808, 0.124, 0.096 ], [ -0.564, -1.544, -0.608 ], ... ],
    	...
    ]
    [
    	[ [ -0.684, -1.26, -1.872 ], [ -1.38, 1.828, -0.964 ], [ 1.492, -1.172, 1.608 ], [ 1.136, -0.824, -0.196 ], [ 0.164, -1.444, 0.184 ], [ -1.72, -0.076, 1.968 ], [ -0.192, 1.648, -1.116 ], [ -0.964, -1.792, 0.44 ], ... ],
    	[ [ 1.584, -1.316, 1.68 ], [ -0.996, 1.656, -0.048 ], [ 0.588, 0.836, -0.952 ], [ 1.332, -0.844, -0.732 ], [ 1.972, -0.416, 1.62 ], [ 1.344, 1.236, 0.096 ], [ 0.248, -1.868, -1.016 ], [ -0.364, 0.484, 1.876 ], ... ],
    	[ [ -1.888, -0.816, 1.556 ], [ 0.416, -1.088, 1.04 ], [ 0.1, -0.612, 1.98 ], [ -0.788, 1.032, 0.432 ], [ 1.396, 1.36, 0.944 ], [ 0.236, -0.624, 0.552 ], [ -0.024, 0.48, 0.8 ], [ -0.46, -1.82, 0.968 ], ... ],
    	[ [ -0.396, 1.6, 0.292 ], [ 1.716, -0.296, -1.364 ], [ 0.78, -1.808, 1.144 ], [ -1.072, 1.004, -0.264 ], [ -1.116, 0.412, -0.492 ], [ 1.524, -1.488, 0.568 ], [ 1.596, 0.744, 1.524 ], [ 1.212, -0.612, 0.12 ], ... ],
    	[ [ 0.952, 1.596, -0.32 ], [ 0.992, -1.756, 0.936 ], [ 0.42, -0.036, 1.512 ], [ -0.224, -0.748, -1.216 ], [ 0.908, -1.444, -1.172 ], [ 1.308, -0.956, -1.892 ], [ 0.264, 0.604, -0.888 ], [ 1.1, 0.904, -1.516 ], ... ],
    	[ [ 0.312, 0.82, 0.732 ], [ -1.736, 1.1, 1.508 ], [ -1.128, -1.788, 1.032 ], [ 1.504, 0.56, 0.664 ], [ 1.368, 1.952, 0.028 ], [ -0.504, -0.04, 0.96 ], [ -1.9

...skipping 9740 bytes...

    1.256, 1.644 ], [ 0.984, -0.94, 0.916 ], [ -1.824, 1.656, 0.116 ], [ 0.84, -1.576, -1.12 ], [ -1.672, 1.188, -0.544 ], [ 0.836, -0.568, -0.18 ], ... ],
    	[ [ 0.596, 0.88, 0.832 ], [ -0.76, 0.68, -1.196 ], [ -1.896, -1.188, -1.284 ], [ 0.796, -1.512, -0.5 ], [ 1.24, -0.68, -1.9 ], [ 0.236, 1.536, 1.952 ], [ -1.868, 0.492, -0.608 ], [ -1.836, -1.716, -1.496 ], ... ],
    	[ [ -1.46, 1.956, -0.064 ], [ 0.516, -1.76, 0.348 ], [ -1.756, 0.496, 0.332 ], [ 1.376, 1.572, 0.524 ], [ 0.268, -1.616, 1.464 ], [ -0.708, -1.18, 1.476 ], [ 0.82, -0.912, 0.416 ], [ -1.156, 0.064, -0.16 ], ... ],
    	[ [ 1.936, -1.816, 1.168 ], [ 1.016, 0.648, -1.46 ], [ -1.348, 0.692, 1.54 ], [ 1.04, -0.344, -0.368 ], [ -0.736, 1.492, -0.576 ], [ 0.436, -0.316, -0.688 ], [ -0.768, 0.364, -1.016 ], [ -0.204, 1.296, -0.348 ], ... ],
    	[ [ 0.884, 0.884, -1.568 ], [ 0.644, 1.816, 1.112 ], [ 0.752, -0.356, 1.26 ], [ 0.78, -0.804, 0.52 ], [ 1.124, -0.92, 0.092 ], [ 0.008, 1.448, -1.34 ], [ -1.252, -0.62, -0.176 ], [ -1.748, 0.132, -0.836 ], ... ],
    	[ [ 1.288, 1.472, 1.396 ], [ 1.78, 0.82, 1.432 ], [ -0.24, -1.116, 0.3 ], [ 0.224, 1.644, 1.52 ], [ 1.496, -0.592, 0.412 ], [ 1.608, 0.732, 0.884 ], [ 1.292, 0.92, 0.424 ], [ -1.164, 1.544, 0.012 ], ... ],
    	...
    ]
    [
    	[ [ -1.404, 0.644, -0.244 ], [ -1.756, 0.756, -1.172 ], [ 0.892, -1.576, -0.592 ], [ -0.852, -0.516, 1.936 ], [ 0.492, 0.948, 1.176 ], [ 0.68, 0.052, -0.136 ], [ -1.556, 1.532, 1.672 ], [ 0.46, 0.568, -1.892 ], ... ],
    	[ [ -0.932, -1.604, -1.516 ], [ 1.428, -1.432, -0.752 ], [ 0.572, -1.968, 0.272 ], [ -0.22, 0.516, 0.456 ], [ -1.868, 1.76, 1.752 ], [ 0.06, -0.34, -0.34 ], [ 0.128, -0.3, -1.892 ], [ 1.884, -1.58, 1.54 ], ... ],
    	[ [ 0.932, -1.08, -1.632 ], [ -0.144, 1.224, -1.304 ], [ -0.048, -0.556, 0.48 ], [ 1.604, 0.74, 1.832 ], [ 1.676, 1.468, -0.5 ], [ -0.768, 1.028, -0.844 ], [ 0.34, 0.804, 1.412 ], [ 0.3, -0.308, 1.104 ], ... ],
    	[ [ 1.62, -1.984, 0.56 ], [ 1.488, 0.084, -1.344 ], [ 0.616, -1.828, 0.088 ], [ -0.236, -0.124, 0.78 ], [ -1.364, -0.452, -0.488 ], [ -1.168, -0.996, -1.016 ], [ 0.048, 1.776, 1.284 ], [ -1.192, 0.588, -0.472 ], ... ],
    	[ [ 0.284, -1.62, -0.184 ], [ -0.236, 1.028, 1.424 ], [ -0.044, -0.432, -1.02 ], [ 1.824, -0.84, -0.008 ], [ 1.132, 1.444, 1.488 ], [ 1.604, -1.732, -1.472 ], [ 1.228, -1.916, -1.604 ], [ 0.152, 0.828, -0.432 ], ... ],
    	[ [ -0.18, 1.572, 1.04 ], [ 1.248, 1.164, -0.96 ], [ 1.532, -1.04, 1.772 ], [ -0.928, -1.66, -0.236 ], [ -1.132, -0.628, 1.108 ], [ 1.728, 1.512, 0.14 ], [ -0.244, -0.444, 1.772 ], [ 0.492, -0.608, -0.772 ], ... ],
    	[ [ 1.196, 0.576, -1.388 ], [ 1.464, 1.428, 1.912 ], [ 0.908, -0.052, 0.684 ], [ 1.328, 1.78, -0.42 ], [ -1.4, -0.228, 1.408 ], [ -2.0, -1.848, 1.62 ], [ 1.4, -0.608, -0.216 ], [ -1.944, -1.984, -1.952 ], ... ],
    	[ [ 0.4, 1.24, 0.552 ], [ -1.636, 0.32, 0.784 ], [ 1.032, 0.124, -1.32 ], [ 0.188, -1.0, -0.784 ], [ -0.292, -1.052, 1.94 ], [ -1.992, 0.292, -0.336 ], [ -1.672, 0.312, 1.268 ], [ -0.968, -0.732, 1.136 ], ... ],
    	...
    ]
    [
    	[ [ 1.696, 1.468, 0.336 ], [ -1.632, -0.532, -1.152 ], [ 1.864, -1.104, -1.324 ], [ -0.592, -1.62, 1.232 ], [ -1.9, -0.488, -0.576 ], [ 0.448, -1.74, 1.832 ], [ -0.384, -1.66, -0.228 ], [ 1.092, -0.836, -0.084 ], ... ],
    	[ [ -0.552, 0.76, 1.372 ], [ -0.308, 1.144, -0.64 ], [ 1.564, 1.172, 0.192 ], [ -0.712, 0.024, -1.128 ], [ -0.34, -0.692, -1.884 ], [ -1.988, 1.884, -1.32 ], [ 0.212, -0.032, -1.528 ], [ -1.776, -0.492, 1.676 ], ... ],
    	[ [ 0.328, -0.776, -0.384 ], [ -0.184, -0.52, 0.164 ], [ -1.936, -0.716, -0.48 ], [ -1.496, -1.804, -1.028 ], [ 0.34, -0.84, 0.06 ], [ 1.656, -1.736, -0.496 ], [ 1.116, -1.128, -1.124 ], [ 0.896, -0.264, 0.832 ], ... ],
    	[ [ -0.236, 1.484, -0.064 ], [ 1.024, 1.196, -1.184 ], [ 1.204, -1.9, -1.124 ], [ 0.62, -1.848, 0.792 ], [ 0.716, -0.172, 0.972 ], [ 1.332, 0.056, 1.764 ], [ 0.264, -0.144, -1.78 ], [ -1.232, 0.336, -1.004 ], ... ],
    	[ [ 1.932, -0.316, -1.844 ], [ 0.86, 0.772, -1.78 ], [ -0.272, -1.056, -0.596 ], [ 1.9, -0.22, 0.816 ], [ 1.58, 1.704, -1.828 ], [ 1.708, 1.616, -0.604 ], [ -1.472, -1.464, -1.084 ], [ 1.336, 1.656, -0.912 ], ... ],
    	[ [ -0.776, 1.716, -0.796 ], [ 0.336, -0.988, 1.664 ], [ -0.544, 1.284, 1.028 ], [ 1.864, 1.028, -0.728 ], [ 0.336, -1.464, -0.24 ], [ -0.028, 1.436, -1.352 ], [ 0.912, 0.656, 1.048 ], [ 0.512, -0.12, -0.988 ], ... ],
    	[ [ 1.656, 0.908, -0.204 ], [ -1.824, 0.4, -0.564 ], [ -1.26, 1.228, -1.32 ], [ -0.656, -0.336, -1.212 ], [ -1.484, 0.48, -1.808 ], [ -0.792, 0.988, 0.312 ], [ 0.548, 1.524, -1.828 ], [ 1.9, -1.756, 0.5 ], ... ],
    	[ [ -1.292, -0.5, -1.904 ], [ 0.72, 1.364, 0.332 ], [ 0.664, 1.924, 0.964 ], [ -1.532, 1.016, -0.452 ], [ -0.652, 0.192, -0.096 ], [ -0.316, -0.344, -1.376 ], [ -0.448, 1.74, 1.52 ], [ 0.16, 0.048, -0.236 ], ... ],
    	...
    ]
    [
    	[ [ 1.024, 1.428, 0.712 ], [ 0.6, -1.428, -1.972 ], [ -0.128, -0.22, -1.668 ], [ 0.184, 1.54, -0.44 ], [ 1.676, 0.44, -0.312 ], [ 1.352, 0.768, -0.396 ], [ -0.948, 1.056, 0.216 ], [ -0.96, 0.0, 0.236 ], ... ],
    	[ [ -1.724, -0.872, -0.124 ], [ 1.68, -0.656, -1.012 ], [ 0.4, 1.3, 0.972 ], [ 0.092, -1.8, -0.916 ], [ 1.428, -0.28, -1.088 ], [ 0.072, -1.684, 0.876 ], [ 1.856, -1.58, 0.556 ], [ -1.472, 0.372, 0.984 ], ... ],
    	[ [ 0.228, 0.412, 1.58 ], [ -1.752, 0.052, -1.42 ], [ -0.172, 0.524, -0.644 ], [ 0.48, -1.02, -1.588 ], [ 0.344, 0.004, 0.64 ], [ -0.812, 1.94, -1.796 ], [ 1.184, -1.828, -1.244 ], [ -1.556, 1.928, -0.6 ], ... ],
    	[ [ 0.356, 1.092, -0.692 ], [ 0.788, 1.312, 0.928 ], [ -0.348, -1.412, -1.324 ], [ 1.484, 1.484, -1.04 ], [ -0.144, -0.5, -0.084 ], [ 1.864, 0.492, 0.708 ], [ -1.396, -1.636, -0.992 ], [ 1.46, 0.088, 1.9 ], ... ],
    	[ [ -1.432, 1.176, -1.308 ], [ -1.316, 0.136, 0.548 ], [ -1.372, -0.156, 1.448 ], [ -0.188, -1.348, 0.6 ], [ -0.472, -1.08, 1.688 ], [ 0.82, 0.152, 0.84 ], [ 1.032, 1.756, 1.392 ], [ 1.424, 1.14, -0.476 ], ... ],
    	[ [ -0.216, 1.024, -1.8 ], [ -0.568, -1.74, 1.428 ], [ -0.68, 0.536, -1.02 ], [ 0.024, -1.164, -1.484 ], [ 0.976, -1.568, 1.296 ], [ 0.828, 0.84, 1.016 ], [ 1.1, 0.548, -0.272 ], [ 1.888, -0.056, -0.188 ], ... ],
    	[ [ -0.696, 1.768, 1.812 ], [ -1.152, -1.08, -1.232 ], [ 0.412, 1.864, -1.264 ], [ -0.276, 0.44, -1.996 ], [ -1.396, 0.408, -1.764 ], [ -0.2, -0.556, 0.884 ], [ -1.956, -1.16, 1.26 ], [ 0.408, -0.296, 0.872 ], ... ],
    	[ [ -0.256, -0.048, -1.088 ], [ 0.388, -1.172, -0.64 ], [ -1.936, -0.94, 0.536 ], [ 0.42, 0.816, -1.584 ], [ -1.04, 1.376, -1.688 ], [ 0.58, -0.428, -1.884 ], [ 0.184, 0.74, -1.18 ], [ 0.584, -1.204, 1.352 ], ... ],
    	...
    ]
    [
    	[ [ -0.92, 0.98, -0.736 ], [ 1.56, -1.64, 0.796 ], [ 0.224, -0.32, -1.152 ], [ -1.088, -0.056, -1.984 ], [ 1.632, 1.968, -0.396 ], [ 0.908, -0.304, -0.884 ], [ 1.444, -1.288, 1.876 ], [ -1.628, 1.488, -1.776 ], ... ],
    	[ [ -0.172, 1.576, 0.536 ], [ -1.484, -0.352, -0.484 ], [ -1.096, -0.28, 1.612 ], [ 1.6, 1.524, -0.84 ], [ 0.32, -1.084, -1.04 ], [ -1.312, 0.164, -0.596 ], [ 1.264, -0.276, 1.324 ], [ -0.408, 1.688, -0.376 ], ... ],
    	[ [ 0.28, -1.92, -1.416 ], [ -0.348, -1.972, 0.948 ], [ 1.524, -0.284, -1.024 ], [ -0.284, -1.304, 0.876 ], [ 0.472, 0.484, -0.592 ], [ 1.048, 1.98, 0.476 ], [ -1.444, -0.864, 1.292 ], [ 1.856, -1.092, -0.7 ], ... ],
    	[ [ -1.996, 1.228, 1.824 ], [ -1.884, 1.076, 1.696 ], [ -1.304, 1.412, 1.42 ], [ 1.264, -1.724, 0.156 ], [ 1.144, 0.884, 1.864 ], [ -1.992, 0.724, -1.428 ], [ -0.924, 1.628, 1.976 ], [ -1.172, 0.744, -1.448 ], ... ],
    	[ [ -0.976, 0.328, 1.364 ], [ 0.052, -0.296, 1.616 ], [ -0.568, 1.964, 0.864 ], [ 0.492, 0.632, 0.684 ], [ 0.08, 1.228, 1.172 ], [ 0.252, 1.836, 0.268 ], [ -0.444, -0.064, 1.544 ], [ 0.116, -0.284, -1.424 ], ... ],
    	[ [ -1.744, 0.048, 0.484 ], [ -1.756, -1.984, 2.0 ], [ 0.384, 0.328, 1.404 ], [ -1.796, 0.948, 0.568 ], [ 1.888, -0.128, -1.044 ], [ 1.016, 1.636, 1.228 ], [ 1.032, 1.316, -1.536 ], [ -1.896, 1.36, -0.804 ], ... ],
    	[ [ -1.22, 1.956, 0.132 ], [ 1.552, -0.724, 0.76 ], [ -1.152, 1.452, -1.892 ], [ -0.324, 1.048, 0.292 ], [ -1.2, -1.064, -0.964 ], [ -1.328, -1.424, -0.072 ], [ 1.636, -0.772, 1.464 ], [ 1.82, -0.564, -0.28 ], ... ],
    	[ [ -0.792, 0.704, -1.648 ], [ -1.54, 0.712, -0.86 ], [ -0.012, 1.704, -0.472 ], [ -0.12, 0.768, 1.12 ], [ 1.74, 1.596, -0.104 ], [ 1.532, -1.324, 0.632 ], [ -1.764, 1.996, 1.084 ], [ 0.396, -1.528, 0.252 ], ... ],
    	...
    ]

Gradient Descent

First, we train using the basic gradient descent method, applying weak line search conditions.

TrainingTester.java:480 executed in 44.68 seconds (7.222 gc):

    IterativeTrainer iterativeTrainer = new IterativeTrainer(trainable.addRef());
    try {
      iterativeTrainer.setLineSearchFactory(label -> new ArmijoWolfeSearch());
      iterativeTrainer.setOrientation(new GradientDescent());
      iterativeTrainer.setMonitor(TrainingTester.getMonitor(history));
      iterativeTrainer.setTimeout(30, TimeUnit.SECONDS);
      iterativeTrainer.setMaxIterations(250);
      iterativeTrainer.setTerminateThreshold(0);
      return iterativeTrainer.run();
    } finally {
      iterativeTrainer.freeRef();
    }
Logging
Reset training subject: 2872791216279
Reset training subject: 2876102340809
Constructing line search parameters: GD
th(0)=4.740839090512594;dx=-9.933972195933671E-6
New Minimum: 4.740839090512594 > 4.740817688492189
WOLFE (weak): th(2.154434690031884)=4.740817688492189; dx=-9.933903589265083E-6 evalInputDelta=2.1402020404792665E-5
New Minimum: 4.740817688492189 > 4.740796286619593
WOLFE (weak): th(4.308869380063768)=4.740796286619593; dx=-9.933834983081981E-6 evalInputDelta=4.280389300070908E-5
New Minimum: 4.740796286619593 > 4.740710680607267
WOLFE (weak): th(12.926608140191302)=4.740710680607267; dx=-9.933560563204331E-6 evalInputDelta=1.284099053266985E-4
New Minimum: 4.740710680607267 > 4.740325482815798
WOLFE (weak): th(51.70643256076521)=4.740325482815798; dx=-9.932325769874419E-6 evalInputDelta=5.136076967957592E-4
New Minimum: 4.740325482815798 > 4.738271903119961
WOLFE (weak): th(258.53216280382605)=4.738271903119961; dx=-9.925742861168447E-6 evalInputDelta=0.0025671873926329525
New Minimum: 4.738271903119961 > 4.725467829740774
WOLFE (weak): th(1551.1929768229563)=4.725467829740774; dx=-9.884700863012195E-6 evalInputDelta=0.015371260771820161
New Minimum: 4.725467829740774 > 4.634827691871996
WOLFE (weak): th(10858.350837760694)=4.634827691871996; dx=-9.594293021838727E-6 evalInputDelta=0.10601139864059839
New Minimum: 4.634827691871996 > 3.9873404871414286
END: th(86866.80670208555)=3.9873404871414286; dx=-7.5298343231018426E-6 evalInputDelta=0.7534986033711655
Fitness changed from 4.740839090512594 to 3.9873404871414286
Iteration 1 complete. Error: 3.9873404871414286 Total: 44.6652; Orientation: 1.8536; Line Search: 35.5271
Final threshold in iteration 1: 3.9873404871414286 (> 0.0) after 44.667s (< 30.000s)

Returns

    3.9873404871414286

This training run resulted in the following configuration:

TrainingTester.java:610 executed in 0.00 seconds (0.000 gc):

    RefList<double[]> state = network.state();
    assert state != null;
    String description = state.stream().map(RefArrays::toString).reduce((a, b) -> a + "\n" + b)
        .orElse("");
    state.freeRef();
    return description;

Returns

    

And regressed input:

TrainingTester.java:622 executed in 0.00 seconds (0.000 gc):

    return RefArrays.stream(RefUtil.addRef(data)).flatMap(x -> {
      return RefArrays.stream(x);
    }).limit(1).map(x -> {
      String temp_18_0015 = x.prettyPrint();
      x.freeRef();
      return temp_18_0015;
    }).reduce((a, b) -> a + "\n" + b).orElse("");

Returns

    [
    	[ [ 1.3193402976219937, -1.1040175020316234, -1.2653297042234457 ], [ -1.2115541621047141, 1.208828788749804, -0.9249112712965657 ], [ 1.7084116858578715, 0.32819337939616516, 1.019708405724653 ], [ 1.642512070352528, 0.044311410428504346, 1.963988032439076 ], [ -1.8561507566239526, 1.1293241775798415, 0.8396096898730284 ], [ -1.1320284755013368, -0.5589534218729687, 0.5483784769458843 ], [ -0.05216072131632014, 0.23202558271202214, -0.9129410497709936 ], [ -0.04313757819776276, 1.683684406006066, 0.5122862051895318 ], ... ],
    	[ [ -0.484, 1.3706145038683644, -1.2562568427781202 ], [ -0.5552203155481197, -0.5606059638523438, 0.9364390659387722 ], [ -1.3818420510370175, -0.24305282750832038, 1.9639831259554763 ], [ -1.8436068749985577, 0.5568693762276684, 1.2837992134460572 ], [ 1.1419561460958618, -1.9293497449262798, -1.4318706420546488 ], [ 0.9881357036765762, 1.293182959110684, 1.9794407822038917 ], [ -1.3689847255466452, 0.8900668255972997, -0.24571798375916085 ], [ 0.5790499038393881, -1.5319612659259976, -1.3681144818702198 ], ... ],
    	[ [ 0.7676982894641119, -1.5334075498977964, -0.8477687304922957 ], [ -0.02946501697177506, -1.4663521738328482, 1.4725231071691283 ], [ -0.9948509967264595, -0.9181724454297341, -1.9913811691610643 ], [ 1.7942355275832906, 1.7509582720519774, 1.5299969333274075 ], [ 1.065175077042013, 1.2863289719434894, 1.2581797007389623 ], [ -0.4896023634407227, 1.4552642256517365, -0.7180578992112508 ], [ -1.086378591287984, 1.3517484868272045, 1.75099081110823 ], [ -1.5172846963157005, -1.1939435269914442, 1.5240927732662541 ], ... ],
    	[ [ -0.4199474041351455, 0.30806409106628174, 0.9910556839616366 ], [ 0.9594914697967681, 1.8491646166779052, -1.6802098367856746 ], [ -0.2690787204597298, 0.35500252655983133, -1.6952419912761714 ], [ 1.4072018258803536, 0.07787750061685528, -0.5112068053347536 ], [ 1.7498483216167326, 0.9054060711735497, 1.9710751476450317 ], [ 0.931973499754835, 0.047012475029515936, -0.7637320871818649 ], [ 0.38902008182730613, 0.5864443882237615, 1.0267699687777574 ], [ -1.9098708798060402, -0.05472955547809561, -0.702002678490196 ], ... ],
    	[ [ 0.13929135379245522, 1.517193572086987, -1.7834317122876584 ], [ 1.3470396650168943, -1.2681046003519076, -1.950549196230937 ], [ -1.9657485840727047, 0.2879598734725765, -0.07345967492809831 ], [ 0.7956215896991456, -1.0636905392623686, -0.5284714414318518 ], [ 1.3954839654692999, 1.5741880350964685, 1.475434839096175 ], [ 1.4176747643072594, 1.5998820943323329, 0.011555916999842045 ], [ 1.584669878826831, 1.0438745073787241, 1.8198041199256765 ], [ 0.8266093123561548, 1.9280201643821944, -0.43741965453937726 ], ... ],
    	[ [ -0.2805017852185558, -1.1635383238120844, 1.9063485468003882 ], [ -1.2240643807008682, 1.6699927131248937, -0.5321207727838275 ], [ -0.5885582336370623, -1.838367704744723, 0.26885418102462144 ], [ -0.4342016634318283, 1.3731281942935218, -0.4868864906657553 ], [ -1.2041096720692723, -0.668274903292578, 1.3963020217148907 ], [ 0.12113958454705882, -1.996243080631666, -1.8954233886760488 ], [ -0.6062360655699788, -1.3184229321039327, -0.25599405936079184 ], [ 0.24630090344969355, 0.5922050939749753, -0.3295757734677782 ], ... ],
    	[ [ 0.9829005199338887, 0.7128006211048467, -1.4451916454281304 ], [ 1.186875506653702, 1.5693561855590856, 1.0551384921535878 ], [ 1.862589611705331, -1.7876922079122832, -1.516 ], [ 0.8191100064115754, -0.8785062758958867, 0.08304473020636464 ], [ -1.308214625780783, 1.1583256428993642, 0.7772133361755944 ], [ 0.5930522742156175, -0.1767178355271521, -0.8418058024114589 ], [ -1.7719934588888713, -0.9126873353478125, 1.189886526696645 ], [ 0.42232010360807465, -1.7089958075271054, 1.2822615045596244 ], ... ],
    	[ [ -0.08297369779253225, -1.3591352161940549, 0.3444830624068722 ], [ 0.13511970982814145, 1.0881422713897864, -0.21271509513988943 ], [ 1.341118307005106, -1.7626961846619682, 0.7832811961454426 ], [ 0.7229835941149606, 0.20804417213224036, -1.2063922908663092 ], [ -1.4395407500268877, 1.9843845969682787, -1.1435195692966251 ], [ 1.8592688380177111, -1.704006722018813, 0.40564593110703867 ], [ 1.0545097014832137, -0.9449094930971963, -0.51465849051324 ], [ -0.14334548691458582, 0.267402540305808, 0.8317457134340311 ], ... ],
    	...
    ]

To produce the following output:

TrainingTester.java:633 executed in 0.12 seconds (0.000 gc):

    Result[] array = ConstantResult.batchResultArray(pop(RefUtil.addRef(data)));
    @Nullable
    Result eval = layer.eval(array);
    assert eval != null;
    TensorList tensorList = Result.getData(eval);
    String temp_18_0016 = tensorList.stream().limit(1).map(x -> {
      String temp_18_0017 = x.prettyPrint();
      x.freeRef();
      return temp_18_0017;
    }).reduce((a, b) -> a + "\n" + b).orElse("");
    tensorList.freeRef();
    return temp_18_0016;

Returns

    [
    	[ [ -0.2216784535499116, 0.013879105666256965, 1.6714155168386906 ], [ 0.0030439824504758027, 3.237995584147486, -0.5791801648204863 ], [ -1.763768085964064, -0.4999129139483262, 0.027592503971527415 ], [ -0.570805888679255, -0.003424361334830741, 2.4832924340560703 ], [ 0.11069559701949101, 0.04396016387724073, 0.2621072555676141 ], [ 0.021780432847997497, -0.34114876937482447, -1.1387125526506006 ], [ -0.002618392948829272, 0.05322653649982012, 0.31144107501904544 ], [ -0.05789477224534354, -4.1762856571933735, 0.10297237305350285 ], ... ],
    	[ [ 0.0025826236300111214, 1.5313216900127418, 1.9815313922421582 ], [ 0.116435183854713, -0.1574068917504655, -0.7581880133418483 ], [ -4.208790844848692, 0.4284083100109666, 0.012683220129221436 ], [ -1.7062066207298412, -1.6310220162672346, -0.15064887436793672 ], [ -1.592255280045664, 2.2021788271292193, -0.028438922492810628 ], [ 0.5260619286880861, -2.475779244836043, 0.9323211548965961 ], [ 3.1300435336722376, 2.4202269083648216, -0.04234210553823681 ], [ 0.21606063800200725, -0.055358949352049915, -0.06401887175664925 ], ... ],
    	[ [ -0.20808747867910432, -1.8797825741287872, -0.3292988372440335 ], [ -0.016761592775077334, 0.6113742523792633, -3.0733669962134216 ], [ -3.064192330776278, -1.836494386050823, -1.8562674793422949 ], [ -2.7557536191988405, -0.5391262740514242, -3.9335145046716558 ], [ -2.4346210757882454, -0.34143585929147807, 0.5317603112790324 ], [ -0.5212814983030067, 0.5813980331156535, -0.4213753906599427 ], [ 0.7637263525959689, -0.7050958883017533, -2.2798849528347347 ], [ 0.5681008064370181, 0.14083015551982814, -0.41807436506715456 ], ... ],
    	[ [ -0.5825769754771887, 0.03913898882265009, -2.517220029137869 ], [ 0.18596885928593687, 3.460862576991336, -0.14367519763884654 ], [ 0.20667037386072457, 0.23254480458214458, 0.20106115573634592 ], [ 0.15613390701766192, 0.0108057273733007, -0.2521032815737971 ], [ 2.6118395188089782, -1.4365547404893027, 1.6384739408303286 ], [ 3.154960287971271, 0.121327529183666, -0.33901637183298167 ], [ -0.506424765720648, 0.18885582630396222, 1.8608361401816629 ], [ -3.303926998606544, 0.15514862473155108, -0.18868217143124325 ], ... ],
    	[ [ 0.014799214021449668, 0.2733324215267287, 1.0317318083780411 ], [ -0.23911229446188131, -4.240358122575321, 0.35541683443015665 ], [ -2.2790639762060914, -0.011641864938541892, 0.011436166538685805 ], [ -0.6994729328700823, 0.12653232724811542, 0.1620491956970833 ], [ 0.5446380171375573, 1.2185226962847215, -3.356886631813278 ], [ -3.4337136285724648, -0.4186357578905258, 0.025773039125913257 ], [ -0.24708543565541904, -0.6191084080943424, 0.08118691351712896 ], [ 1.360369181702106, 3.3368900739154554, -0.5097839815048301 ], ... ],
    	[ [ -0.2180649592740849, -0.5449480913525683, 0.5894634457638214 ], [ 1.1674565863131963, -1.4162008220362023, 0.010898942911743774 ], [ -0.11864917516002577, 2.9911220560327503, 0.34607973319630336 ], [ -0.19025713999641602, 0.15334293007227215, -0.7530624252147369 ], [ 0.08083001029993271, 0.013078715948704998, 0.9073502931116921 ], [ -0.17136551773615652, 0.16701194621484708, -0.4866358969585033 ], [ -1.1823050242110489, 1.775507564139697, -0.00213719357736965 ], [ -0.3370368872114297, -0.09887871263255994, 0.7560061898506197 ], ... ],
    	[ [ -3.4072448807733635, -0.9060307511848795, -1.5553440752712835 ], [ 0.30313486522449545, 0.8910898850701656, -2.4535127738377986 ], [ -4.515413769849472, 1.2914947834386603, 0.01940340941866542 ], [ 0.28931249461403125, -0.09215703170139229, 0.1230736423927646 ], [ 0.11815914507660606, -0.2350214702671629, -1.181189257692094 ], [ 0.33595406746143125, -0.08745322794215281, 0.2538510094483087 ], [ -0.059923149022558575, -0.046185069168521165, -0.8333669657295834 ], [ -0.5672912429240398, -0.4608387747827603, -0.07729539582826192 ], ... ],
    	[ [ -0.15821789112135523, -0.03201774079965577, -0.07279060364143776 ], [ 0.05941140319352598, 0.1712545937976942, -0.2970878231606954 ], [ -0.16318311841941968, 1.0914211353179082, 0.4203976225571701 ], [ 0.16722522509953092, 0.04385235140382118, 1.1543992524405875 ], [ -1.5115966916788623, -0.21330727384605494, 0.6787835052324205 ], [ 0.4388011654254634, 0.0145026352735248, -0.3420128647175179 ], [ -0.4383839506758006, 0.9755415621384791, 0.586325135817365 ], [ 0.21280187866703992, 0.13786269984201024, -0.023018451598100568 ], ... ],
    	...
    ]

Conjugate Gradient Descent

First, we use a conjugate gradient descent method, which converges the fastest for purely linear functions.

TrainingTester.java:452 executed in 102.58 seconds (7.602 gc):

    IterativeTrainer iterativeTrainer = new IterativeTrainer(trainable.addRef());
    try {
      iterativeTrainer.setLineSearchFactory(label -> new QuadraticSearch());
      iterativeTrainer.setOrientation(new GradientDescent());
      iterativeTrainer.setMonitor(TrainingTester.getMonitor(history));
      iterativeTrainer.setTimeout(30, TimeUnit.SECONDS);
      iterativeTrainer.setMaxIterations(250);
      iterativeTrainer.setTerminateThreshold(0);
      return iterativeTrainer.run();
    } finally {
      iterativeTrainer.freeRef();
    }
Logging
Reset training subject: 2918056191774
Reset training subject: 2919566854363
Constructing line search parameters: GD
F(0.0) = LineSearchPoint{point=PointSample{avg=4.740839090512594}, derivative=-9.933972195933671E-6}
New Minimum: 4.740839090512594 > 4.740839090512592
F(1.0E-10) = LineSearchPoint{point=PointSample{avg=4.740839090512592}, derivative=-9.933972195933668E-6}, evalInputDelta = -1.7763568394002505E-15
New Minimum: 4.740839090512592 > 4.740839090512587
F(7.000000000000001E-10) = LineSearchPoint{point=PointSample{avg=4.740839090512587}, derivative=-9.933972195933649E-6}, evalInputDelta = -7.105427357601002E-15
New Minimum: 4.740839090512587 > 4.740839090512544
F(4.900000000000001E-9) = LineSearchPoint{point=PointSample{avg=4.740839090512544}, derivative=-9.933972195933515E-6}, evalInputDelta = -4.973799150320701E-14
New Minimum: 4.740839090512544 > 4.740839090512253
F(3.430000000000001E-8) = LineSearchPoint{point=PointSample{avg=4.740839090512253}, derivative=-9.933972195932578E-6}, evalInputDelta = -3.410605131648481E-13
New Minimum: 4.740839090512253 > 4.7408390905102085
F(2.4010000000000004E-7) = LineSearchPoint{point=PointSample{avg=4.7408390905102085}, derivative=-9.933972195926026E-6}, evalInputDelta = -2.3856472353145364E-12
New Minimum: 4.7408390905102085 > 4.740839090495898
F(1.6807000000000003E-6) = LineSearchPoint{point=PointSample{avg=4.740839090495898}, derivative=-9.93397219588015E-6}, evalInputDelta = -1.6695977933522954E-11
New Minimum: 4.740839090495898 > 4.740839090395721
F(1.1764900000000001E-5) = LineSearchPoint{point=PointSample{avg=4.740839090395721}, derivative=-9.933972195559023E-6}, evalInputDelta = -1.1687273371308038E-10
New Minimum: 4.740839090395721 > 4.740839089694488
F(8.235430000000001E-5) = LineSearchPoint{point=PointSample{avg=4.740839089694488}, derivative=-9.933972193311138E-6}, evalInputDelta = -8.181064714563036E-10
New Minimum: 4.740839089694488 > 4.740839084785856
F(5.764801000000001E-4) = LineSearchPoint{point=PointSample{avg=4.740839084785856}, derivative=-9.933972177575946E-6}, evalInputDelta = -5.726738194766767E-9
New Minimum: 4.740839084785856 > 4.740839050425434
F(0.004035360700000001) = LineSearchPoint{point=PointSample{avg=4.740839050425434}, derivative=-9.933972067429598E-6}, evalInputDelta = -4.008716025794001E-8
New Minimum: 4.740839050425434 > 4.740838809902479
F(0.028247524900000005) = LineSearchPoint{point=PointSample{avg=4.740838809902479}, derivative=-9.933971296405196E-6}, evalInputDelta = -2.8061011470015274E-7
New Minimum: 4.740838809902479 > 4.740837126242328
F(0.19773267430000002) = LineSearchPoint{point=PointSample{avg=4.740837126242328}, derivative=-9.933965899236114E-6}, evalInputDelta = -1.9642702664413036E-6
New Minimum: 4.740837126242328 > 4.740825340646877
F(1.3841287201) = LineSearchPoint{point=PointSample{avg=4.740825340646877}, derivative=-9.933928119136658E-6}, evalInputDelta = -1.374986571711645E-5
New Minimum: 4.740825340646877 > 4.740742842733729
F(9.688901040700001) = LineSearchPoint{point=PointSample{avg=4.740742842733729}, derivative=-9.933663662562595E-6}, evalInputDelta = -9.62477788650773E-5
New Minimum: 4.740742842733729 > 4.740165418832547
F(67.8223072849) = LineSearchPoint{point=PointSample{avg=4.740165418832547}, derivative=-9.931812668515107E-6}, evalInputDelta = -6.736716800475051E-4
New Minimum: 4.740165418832547 > 4.736126463014976
F(474.7561509943) = LineSearchPoint{point=PointSample{avg=4.736126463014976}, derivative=-9.918865602085752E-6}, evalInputDelta = -0.004712627497617916
New Minimum: 4.736126463014976 > 4.708000801221195
F(3323.2930569601003) = LineSearchPoint{point=PointSample{avg=4.708000801221195}, derivative=-9.828719240291009E-6}, evalInputDelta = -0.03283828929139876
New Minimum: 4.708000801221195 > 4.518145542413708
F(23263.0513987207) = LineSearchPoint{point=PointSample{avg=4.518145542413708}, derivative=-9.220825265232523E-6}, evalInputDelta = -0.22269354809888586
New Minimum: 4.518145542413708 > 3.4786272801066525
F(162841.3597910449) = LineSearchPoint{point=PointSample{avg=3.4786272801066525}, derivative=-5.930056751844114E-6}, evalInputDelta = -1.2622118104059417
New Minimum: 3.4786272801066525 > 1.5465304924595489
F(1139889.5185373144) = LineSearchPoint{point=PointSample{avg=1.5465304924595489}, derivative=-2.911198762790761E-7}, evalInputDelta = -3.1943085980530452
F(7979226.6297612) = LineSearchPoint{point=PointSample{avg=179877.1487833525}, derivative=0.14923191759982932}, evalInputDelta = 179872.40794426197
F(613786.6638277846) = LineSearchPoint{point=PointSample{avg=2.0073732097251353}, derivative=-1.620866979078558E-6}, evalInputDelta = -2.733465880787459
F(4296506.646794492) = LineSearchPoint{point=PointSample{avg=2546.458449312806}, derivative=0.004269287127512586}, evalInputDelta = 2541.7176102222934
F(330500.511291884) = LineSearchPoint{point=PointSample{avg=2.7011585319897473}, derivative=-3.566154401108687E-6}, evalInputDelta = -2.039680558522847
F(2313503.579043188) = LineSearchPoint{point=PointSample{avg=22.20558421111702}, derivative=7.658762844855562E-5}, evalInputDelta = 17.464745120604427
F(177961.81377255294) = LineSearchPoint{point=PointSample{avg=3.3910381351495333}, derivative=-5.65778375621826E-6}, evalInputDelta = -1.3498009553630608
New Minimum: 1.5465304924595489 > 1.5331261430129999
F(1245732.6964078706) = LineSearchPoint{point=PointSample{avg=1.5331261430129999}, derivative=6.735009672079396E-8}, evalInputDelta = -3.2077129474995942
1.5331261430129999 <= 4.740839090512594
Converged to right
Fitness changed from 4.740839090512594 to 1.5331261430129999
Iteration 1 complete. Error: 1.5331261430129999 Total: 102.5764; Orientation: 2.1208; Line Search: 95.9540
Final threshold in iteration 1: 1.5331261430129999 (> 0.0) after 102.577s (< 30.000s)

Returns

    1.5331261430129999

This training run resulted in the following configuration:

TrainingTester.java:610 executed in 0.00 seconds (0.000 gc):

    RefList<double[]> state = network.state();
    assert state != null;
    String description = state.stream().map(RefArrays::toString).reduce((a, b) -> a + "\n" + b)
        .orElse("");
    state.freeRef();
    return description;

Returns

    

And regressed input:

TrainingTester.java:622 executed in 0.00 seconds (0.000 gc):

    return RefArrays.stream(RefUtil.addRef(data)).flatMap(x -> {
      return RefArrays.stream(x);
    }).limit(1).map(x -> {
      String temp_18_0015 = x.prettyPrint();
      x.freeRef();
      return temp_18_0015;
    }).reduce((a, b) -> a + "\n" + b).orElse("");

Returns

    [
    	[ [ 1.3105393918185697, -1.104250991763996, -0.9095287116784869 ], [ -1.2056063628382243, 0.09926480933564719, -0.8837054013807867 ], [ 1.5004523257390516, 0.5442247466552279, 1.0158183276596 ], [ 1.6226620150759392, 0.04846585027720301, 1.7503768299797962 ], [ -1.8581619587825033, 1.1469896621009257, 0.834402671108021 ], [ -1.1324083592388028, -0.5449913143847962, 0.23325031269558877 ], [ -0.0543048596621764, 0.23236687455241745, -0.8187695595540061 ], [ -0.03163224898059781, -0.13486399138494987, 0.46274150232071254 ], ... ],
    	[ [ -0.484, 1.8323969653752998, -0.886143104311328 ], [ -0.5448187622929521, -0.5686899589424858, 0.835570749138299 ], [ 0.6213733511150297, -0.23041686293281124, 1.963758014024209 ], [ -1.6782736438566934, 0.9953705756189031, 1.2811205759168343 ], [ 0.7945123402069789, -1.0401872434195911, -1.430144913711208 ], [ 1.0433089752940294, -0.4520107717788777, 2.0787061865187146 ], [ -0.421589715455734, -0.09625507554956592, -0.2686371269065308 ], [ 0.5663749354104288, -1.531444525425972, -1.3696417526359386 ], ... ],
    	[ [ 0.7636732531829383, -0.8051048683467086, -0.844683428361567 ], [ -0.15573517389942826, -1.497731870232818, 0.6256955480706445 ], [ 0.6213641353326083, -0.41352556882925173, -1.929762632544186 ], [ 1.2904302124351186, 1.7904237560919716, -0.20433779409842812 ], [ -0.520035146411761, 1.2640362517555264, 1.2338955949595514 ], [ -0.24416461490151342, 1.3387226915869606, -0.745511759792888 ], [ -0.691207622483796, 1.2416673463150911, 1.8442532762807802 ], [ -1.2676090390810375, -1.0064200330629605, 1.525330435588839 ], ... ],
    	[ [ -0.31251996406163185, 0.3089191121424394, -1.0493318657856643 ], [ 0.9527073098997482, 1.010895257378259, -1.68300921093739 ], [ -0.07001808330189147, 0.34169551103129625, -1.6851296078757219 ], [ 1.3965536071139342, 0.04956182712279404, -0.5006250160835027 ], [ 1.2408773994436184, 0.6573496421339737, 1.8520111765207803 ], [ 0.8248941940801454, -0.01952470737402539, -0.760157936845978 ], [ -0.024274383564263702, 0.5656914045059418, 0.5834573714886182 ], [ -1.0276606936775543, 0.015581977913117093, -0.6753569681623658 ], ... ],
    	[ [ 0.1298375019821837, 1.4263909118006044, -1.722487457454499 ], [ 1.2808652166669061, -1.00268561133219, -1.9311944269645853 ], [ -2.042438844125784, 0.2874245566390374, -0.03956990521793145 ], [ 0.6838475499221474, -1.059562109696927, -0.5347608103531795 ], [ 1.3885996924282802, 1.44328934238074, 0.2939116787048863 ], [ 0.42612248261630414, 1.6516720342778584, 0.4325346224208839 ], [ 1.5936065458015105, 0.9888374586078856, 1.8171909383753837 ], [ 0.5412421023835554, 1.234571645472351, -0.1895444374025136 ], ... ],
    	[ [ -0.340558848792423, -1.050653457012031, 1.8309540826898378 ], [ -1.171560379071417, 1.3763996244322698, -0.5337319688769745 ], [ -0.5960054732103612, -1.12287418175699, 0.5470640055214071 ], [ -0.46357344246485616, 1.334816233701792, -0.2052170393717263 ], [ -1.2055727766193103, -0.6719423116022785, 1.2936054360685036 ], [ 0.08297957815739984, -1.999485951679659, -1.8877309774972115 ], [ 0.057650732367562796, -0.9772063884235062, -0.25591480694663327 ], [ 0.010182182738539935, 0.5949411956092656, -0.24387195542060258 ], ... ],
    	[ [ 0.11442647616100021, 0.6701185977797192, -1.3010003954822016 ], [ 1.171873953912215, 1.534085792982151, 0.0831133962464985 ], [ 0.18952452819328647, -1.7302231527008924, -1.516 ], [ 0.8072368496692158, -0.9119418050260859, 0.016937855139256122 ], [ -1.3110778885832686, 1.189351397500345, 0.5799485698141671 ], [ 0.44700171171613545, -0.18629427949254965, -0.8125336231156828 ], [ -1.771906195745954, -0.921856884910061, 1.2150541542607516 ], [ 0.45327196006576664, -1.7756434852131655, 1.2590687207823887 ], ... ],
    	[ [ -0.4961851788237468, -1.347598376142861, 0.3509274635213657 ], [ 0.12337600366534056, 1.0900402744011057, -0.16889209372156394 ], [ 1.2493115562106354, -1.7453023471412428, 0.8804176072736762 ], [ 0.7627868927490734, 0.2086334602535878, -0.9181298576034402 ], [ -1.3266882506832114, 1.989515398073365, -1.0837473903659725 ], [ 1.8495146095236195, -1.7040963986007924, 0.3742409532375708 ], [ 1.034628043780451, -1.1171314472786185, -0.4433988992061104 ], [ -0.18797669665925631, 0.25943199682252177, 0.8283533471355627 ], ... ],
    	...
    ]

To produce the following output:

TrainingTester.java:633 executed in 0.12 seconds (0.000 gc):

    Result[] array = ConstantResult.batchResultArray(pop(RefUtil.addRef(data)));
    @Nullable
    Result eval = layer.eval(array);
    assert eval != null;
    TensorList tensorList = Result.getData(eval);
    String temp_18_0016 = tensorList.stream().limit(1).map(x -> {
      String temp_18_0017 = x.prettyPrint();
      x.freeRef();
      return temp_18_0017;
    }).reduce((a, b) -> a + "\n" + b).orElse("");
    tensorList.freeRef();
    return temp_18_0016;

Returns

    [
    	[ [ -0.17694462168693606, 0.01476419739735967, 0.5153493395656923 ], [ -0.6080281643690213, 0.06239155127505673, -0.3853174526997348 ], [ -0.6541789814297267, -0.9315316942331707, -0.12228034412616712 ], [ -0.25835741242731913, -0.004096813649725363, 0.9848362737049944 ], [ 0.298289766263698, 0.11054965155312337, 0.2454143030277705 ], [ 0.054314226217299554, -0.3202303486924296, -0.40356880478022406 ], [ -0.0030205769975641655, 0.05342706725870542, -0.06475334155791256 ], [ -0.04241977308810261, 0.01524125696521851, -0.012879033956978187 ], ... ],
    	[ [ 0.037036686630535436, 5.67090997148835, 0.6943540313898641 ], [ 0.08897857316534914, -0.16818762161824047, -0.5112672316660338 ], [ -0.018650154159421888, 0.4043359141521035, 0.010908679312822566 ], [ -0.707898756259602, -3.394445022007159, -0.13683604686129539 ], [ -0.5585171438153169, -1.7162427395972712, 0.034583265986246246 ], [ 0.7189215461103223, -0.27972291969815, 2.688517702615784 ], [ 0.06502119165881956, -0.11253798909902676, -0.052651736564133786 ], [ 0.1715955597440444, -0.019900000458196766, -0.09598068752364292 ], ... ],
    	[ [ -0.2014599049807284, 0.6129554221060987, -0.32322617581601887 ], [ -0.09052245119038559, 0.9342134793700625, -0.17957971029431802 ], [ 0.36264385585447506, -0.4641679845304024, -1.2457154112958195 ], [ 0.46100505750385895, -2.7472379890516585, -0.016817205851124156 ], [ 0.05870994396797484, -0.2375607782984617, 0.27416738094306603 ], [ -0.20238593377673814, 0.07939792617739469, -0.4753579362500563 ], [ 0.014154083200446503, -0.299646011886055, -3.057828068648342 ], [ -1.5476325150084085, -2.1080896818064705, -0.43777312808731184 ], ... ],
    	[ [ -0.3930873807368098, 0.039413479034606874, -0.020451951965622172 ], [ 0.06809320997877845, -0.19923792659744302, -0.1768630461162 ], [ 0.044136189938328525, 0.21929637673923313, 0.08643703253633095 ], [ -0.019736015679270184, 0.00560988453786488, -0.23794875210756922 ], [ 0.3029919725764011, -0.7635163384052909, 0.692398006477963 ], [ 2.6292364046702805, -0.05025093662456099, -0.3328026627840884 ], [ 0.021132989176060657, 0.14069566963641736, 0.5876311797117694 ], [ 0.182607858536362, -0.04404075990209183, -0.1558727510787282 ], ... ],
    	[ [ 0.013004559682097528, -0.5516149476032901, 0.6141145257809585 ], [ 0.1121174975127513, -2.6990500907855184, -0.2605972533908204 ], [ -3.0523859093973913, -0.011117228027044233, 0.005790136478306907 ], [ -0.47969996346946203, 0.07301436116484997, 0.1719713125927771 ], [ 0.4809120106906775, 0.6051773109230018, -0.015414603143147408 ], [ -0.15731760872861286, -1.586283263094588, 0.9574065071204874 ], [ -0.32901968682686095, -0.22890198905069994, -0.021857340209989858 ], [ 0.6149054610322706, -0.140346023197707, -0.1534812538954356 ], ... ],
    	[ [ -0.2771063234996511, -0.24932743119682618, 0.10015530716399569 ], [ 0.9396607966412921, 0.2827319689282919, 0.022019873907800774 ], [ -0.14687203524269526, -0.14304699398760376, 0.7825851733250642 ], [ -0.2296384783412873, -0.3723845357640185, -0.26106940436885145 ], [ 0.08601353823327518, 0.08684794299460331, 0.26814097146677907 ], [ -0.11648650423069773, 0.19523752703713873, -0.4308880891821031 ], [ 0.06080604490680603, 0.14614591981803932, -0.0021249585721163132 ], [ -0.0125311170013439, -0.10448090632393069, 0.5454672926326031 ], ... ],
    	[ [ -0.2185540034705022, -0.8012210102613221, -0.7098214766672443 ], [ 0.10965801246839212, 0.6440071059135749, -0.055131581016776274 ], [ -0.00851741762305867, 0.7284566355301924, 0.2782588937281566 ], [ 0.24472240898485226, -0.3540720206726969, 0.024900193575445328 ], [ 0.20933791259930787, -0.7905503961999335, -0.6796591480743593 ], [ 0.05174103739662849, -0.09289670671131067, 0.20344535404740471 ], [ -0.05937313882205878, -0.18194405770565147, -1.0566606236443017 ], [ -0.6265934086161603, -0.992787224060582, 0.06503191822741958 ], ... ],
    	[ [ -0.9697624166923396, 0.0457031702261338, -0.07644514115392988 ], [ 0.05319278786075101, 0.17912571935377847, -0.23122263284380154 ], [ 0.18882742179703033, 0.863503503084454, 0.7529738399745266 ], [ 0.27800367386207525, 0.04428789200997037, 0.35637560712434546 ], [ -0.6305455679871215, -0.256848580635133, 0.2819150910713603 ], [ -0.056214378897884464, 0.09523780125045869, -0.3028887137607068 ], [ -0.3877141557207082, 1.8984925406053645, 0.4475229640159093 ], [ 0.2815375605412957, 0.12991557021162936, -0.017815339208324243 ], ... ],
    	...
    ]

Limited-Memory BFGS

Next, we apply the same optimization using L-BFGS, which is nearly ideal for purely second-order or quadratic functions.

TrainingTester.java:509 executed in 62.69 seconds (15.856 gc):

    IterativeTrainer iterativeTrainer = new IterativeTrainer(trainable.addRef());
    try {
      iterativeTrainer.setLineSearchFactory(label -> new ArmijoWolfeSearch());
      iterativeTrainer.setOrientation(new LBFGS());
      iterativeTrainer.setMonitor(TrainingTester.getMonitor(history));
      iterativeTrainer.setTimeout(30, TimeUnit.SECONDS);
      iterativeTrainer.setIterationsPerSample(100);
      iterativeTrainer.setMaxIterations(250);
      iterativeTrainer.setTerminateThreshold(0);
      return iterativeTrainer.run();
    } finally {
      iterativeTrainer.freeRef();
    }
Logging
Reset training subject: 3021228370117
Reset training subject: 3022721550237
Adding measurement 1f0721f3 to history. Total: 0
LBFGS Accumulation History: 1 points
Constructing line search parameters: GD
Non-optimal measurement 4.740839090512594 < 4.740839090512594. Total: 1
th(0)=4.740839090512594;dx=-9.933972195933671E-6
Adding measurement 5d318c46 to history. Total: 1
New Minimum: 4.740839090512594 > 4.740817688492189
WOLFE (weak): th(2.154434690031884)=4.740817688492189; dx=-9.933903589265083E-6 evalInputDelta=2.1402020404792665E-5
Adding measurement 513ad86d to history. Total: 2
New Minimum: 4.740817688492189 > 4.740796286619593
WOLFE (weak): th(4.308869380063768)=4.740796286619593; dx=-9.933834983081981E-6 evalInputDelta=4.280389300070908E-5
Adding measurement 6523c319 to history. Total: 3
New Minimum: 4.740796286619593 > 4.740710680607267
WOLFE (weak): th(12.926608140191302)=4.740710680607267; dx=-9.933560563204331E-6 evalInputDelta=1.284099053266985E-4
Adding measurement 14cdef93 to history. Total: 4
New Minimum: 4.740710680607267 > 4.740325482815798
WOLFE (weak): th(51.70643256076521)=4.740325482815798; dx=-9.932325769874419E-6 evalInputDelta=5.136076967957592E-4
Adding measurement 52e35240 to history. Total: 5
New Minimum: 4.740325482815798 > 4.738271903119961
WOLFE (weak): th(258.53216280382605)=4.738271903119961; dx=-9.925742861168447E-6 evalInputDelta=0.0025671873926329525
Adding measurement 5859ddbf to history. Total: 6
New Minimum: 4.738271903119961 > 4.725467829740774
WOLFE (weak): th(1551.1929768229563)=4.725467829740774; dx=-9.884700863012195E-6 evalInputDelta=0.015371260771820161
Adding measurement 6ea61ef9 to history. Total: 7
New Minimum: 4.725467829740774 > 4.634827691871996
WOLFE (weak): th(10858.350837760694)=4.634827691871996; dx=-9.594293021838727E-6 evalInputDelta=0.10601139864059839
Adding measurement 2428ae4 to history. Total: 8
New Minimum: 4.634827691871996 > 3.9873404871414286
END: th(86866.80670208555)=3.9873404871414286; dx=-7.5298343231018426E-6 evalInputDelta=0.7534986033711655
Fitness changed from 4.740839090512594 to 3.9873404871414286
Iteration 1 complete. Error: 3.9873404871414286 Total: 62.6926; Orientation: 2.4665; Line Search: 55.5203
Final threshold in iteration 1: 3.9873404871414286 (> 0.0) after 62.693s (< 30.000s)

Returns

    3.9873404871414286

This training run resulted in the following configuration:

TrainingTester.java:610 executed in 0.00 seconds (0.000 gc):

    RefList<double[]> state = network.state();
    assert state != null;
    String description = state.stream().map(RefArrays::toString).reduce((a, b) -> a + "\n" + b)
        .orElse("");
    state.freeRef();
    return description;

Returns

    

And regressed input:

TrainingTester.java:622 executed in 0.00 seconds (0.000 gc):

    return RefArrays.stream(RefUtil.addRef(data)).flatMap(x -> {
      return RefArrays.stream(x);
    }).limit(1).map(x -> {
      String temp_18_0015 = x.prettyPrint();
      x.freeRef();
      return temp_18_0015;
    }).reduce((a, b) -> a + "\n" + b).orElse("");

Returns

    [
    	[ [ 1.3193402976219937, -1.1040175020316234, -1.2653297042234457 ], [ -1.2115541621047141, 1.208828788749804, -0.9249112712965657 ], [ 1.7084116858578715, 0.32819337939616516, 1.019708405724653 ], [ 1.642512070352528, 0.044311410428504346, 1.963988032439076 ], [ -1.8561507566239526, 1.1293241775798415, 0.8396096898730284 ], [ -1.1320284755013368, -0.5589534218729687, 0.5483784769458843 ], [ -0.05216072131632014, 0.23202558271202214, -0.9129410497709936 ], [ -0.04313757819776276, 1.683684406006066, 0.5122862051895318 ], ... ],
    	[ [ -0.484, 1.3706145038683644, -1.2562568427781202 ], [ -0.5552203155481197, -0.5606059638523438, 0.9364390659387722 ], [ -1.3818420510370175, -0.24305282750832038, 1.9639831259554763 ], [ -1.8436068749985577, 0.5568693762276684, 1.2837992134460572 ], [ 1.1419561460958618, -1.9293497449262798, -1.4318706420546488 ], [ 0.9881357036765762, 1.293182959110684, 1.9794407822038917 ], [ -1.3689847255466452, 0.8900668255972997, -0.24571798375916085 ], [ 0.5790499038393881, -1.5319612659259976, -1.3681144818702198 ], ... ],
    	[ [ 0.7676982894641119, -1.5334075498977964, -0.8477687304922957 ], [ -0.02946501697177506, -1.4663521738328482, 1.4725231071691283 ], [ -0.9948509967264595, -0.9181724454297341, -1.9913811691610643 ], [ 1.7942355275832906, 1.7509582720519774, 1.5299969333274075 ], [ 1.065175077042013, 1.2863289719434894, 1.2581797007389623 ], [ -0.4896023634407227, 1.4552642256517365, -0.7180578992112508 ], [ -1.086378591287984, 1.3517484868272045, 1.75099081110823 ], [ -1.5172846963157005, -1.1939435269914442, 1.5240927732662541 ], ... ],
    	[ [ -0.4199474041351455, 0.30806409106628174, 0.9910556839616366 ], [ 0.9594914697967681, 1.8491646166779052, -1.6802098367856746 ], [ -0.2690787204597298, 0.35500252655983133, -1.6952419912761714 ], [ 1.4072018258803536, 0.07787750061685528, -0.5112068053347536 ], [ 1.7498483216167326, 0.9054060711735497, 1.9710751476450317 ], [ 0.931973499754835, 0.047012475029515936, -0.7637320871818649 ], [ 0.38902008182730613, 0.5864443882237615, 1.0267699687777574 ], [ -1.9098708798060402, -0.05472955547809561, -0.702002678490196 ], ... ],
    	[ [ 0.13929135379245522, 1.517193572086987, -1.7834317122876584 ], [ 1.3470396650168943, -1.2681046003519076, -1.950549196230937 ], [ -1.9657485840727047, 0.2879598734725765, -0.07345967492809831 ], [ 0.7956215896991456, -1.0636905392623686, -0.5284714414318518 ], [ 1.3954839654692999, 1.5741880350964685, 1.475434839096175 ], [ 1.4176747643072594, 1.5998820943323329, 0.011555916999842045 ], [ 1.584669878826831, 1.0438745073787241, 1.8198041199256765 ], [ 0.8266093123561548, 1.9280201643821944, -0.43741965453937726 ], ... ],
    	[ [ -0.2805017852185558, -1.1635383238120844, 1.9063485468003882 ], [ -1.2240643807008682, 1.6699927131248937, -0.5321207727838275 ], [ -0.5885582336370623, -1.838367704744723, 0.26885418102462144 ], [ -0.4342016634318283, 1.3731281942935218, -0.4868864906657553 ], [ -1.2041096720692723, -0.668274903292578, 1.3963020217148907 ], [ 0.12113958454705882, -1.996243080631666, -1.8954233886760488 ], [ -0.6062360655699788, -1.3184229321039327, -0.25599405936079184 ], [ 0.24630090344969355, 0.5922050939749753, -0.3295757734677782 ], ... ],
    	[ [ 0.9829005199338887, 0.7128006211048467, -1.4451916454281304 ], [ 1.186875506653702, 1.5693561855590856, 1.0551384921535878 ], [ 1.862589611705331, -1.7876922079122832, -1.516 ], [ 0.8191100064115754, -0.8785062758958867, 0.08304473020636464 ], [ -1.308214625780783, 1.1583256428993642, 0.7772133361755944 ], [ 0.5930522742156175, -0.1767178355271521, -0.8418058024114589 ], [ -1.7719934588888713, -0.9126873353478125, 1.189886526696645 ], [ 0.42232010360807465, -1.7089958075271054, 1.2822615045596244 ], ... ],
    	[ [ -0.08297369779253225, -1.3591352161940549, 0.3444830624068722 ], [ 0.13511970982814145, 1.0881422713897864, -0.21271509513988943 ], [ 1.341118307005106, -1.7626961846619682, 0.7832811961454426 ], [ 0.7229835941149606, 0.20804417213224036, -1.2063922908663092 ], [ -1.4395407500268877, 1.9843845969682787, -1.1435195692966251 ], [ 1.8592688380177111, -1.704006722018813, 0.40564593110703867 ], [ 1.0545097014832137, -0.9449094930971963, -0.51465849051324 ], [ -0.14334548691458582, 0.267402540305808, 0.8317457134340311 ], ... ],
    	...
    ]

To produce the following output:

TrainingTester.java:633 executed in 0.12 seconds (0.000 gc):

    Result[] array = ConstantResult.batchResultArray(pop(RefUtil.addRef(data)));
    @Nullable
    Result eval = layer.eval(array);
    assert eval != null;
    TensorList tensorList = Result.getData(eval);
    String temp_18_0016 = tensorList.stream().limit(1).map(x -> {
      String temp_18_0017 = x.prettyPrint();
      x.freeRef();
      return temp_18_0017;
    }).reduce((a, b) -> a + "\n" + b).orElse("");
    tensorList.freeRef();
    return temp_18_0016;

Returns

    [
    	[ [ -0.2216784535499116, 0.013879105666256965, 1.6714155168386906 ], [ 0.0030439824504758027, 3.237995584147486, -0.5791801648204863 ], [ -1.763768085964064, -0.4999129139483262, 0.027592503971527415 ], [ -0.570805888679255, -0.003424361334830741, 2.4832924340560703 ], [ 0.11069559701949101, 0.04396016387724073, 0.2621072555676141 ], [ 0.021780432847997497, -0.34114876937482447, -1.1387125526506006 ], [ -0.002618392948829272, 0.05322653649982012, 0.31144107501904544 ], [ -0.05789477224534354, -4.1762856571933735, 0.10297237305350285 ], ... ],
    	[ [ 0.0025826236300111214, 1.5313216900127418, 1.9815313922421582 ], [ 0.116435183854713, -0.1574068917504655, -0.7581880133418483 ], [ -4.208790844848692, 0.4284083100109666, 0.012683220129221436 ], [ -1.7062066207298412, -1.6310220162672346, -0.15064887436793672 ], [ -1.592255280045664, 2.2021788271292193, -0.028438922492810628 ], [ 0.5260619286880861, -2.475779244836043, 0.9323211548965961 ], [ 3.1300435336722376, 2.4202269083648216, -0.04234210553823681 ], [ 0.21606063800200725, -0.055358949352049915, -0.06401887175664925 ], ... ],
    	[ [ -0.20808747867910432, -1.8797825741287872, -0.3292988372440335 ], [ -0.016761592775077334, 0.6113742523792633, -3.0733669962134216 ], [ -3.064192330776278, -1.836494386050823, -1.8562674793422949 ], [ -2.7557536191988405, -0.5391262740514242, -3.9335145046716558 ], [ -2.4346210757882454, -0.34143585929147807, 0.5317603112790324 ], [ -0.5212814983030067, 0.5813980331156535, -0.4213753906599427 ], [ 0.7637263525959689, -0.7050958883017533, -2.2798849528347347 ], [ 0.5681008064370181, 0.14083015551982814, -0.41807436506715456 ], ... ],
    	[ [ -0.5825769754771887, 0.03913898882265009, -2.517220029137869 ], [ 0.18596885928593687, 3.460862576991336, -0.14367519763884654 ], [ 0.20667037386072457, 0.23254480458214458, 0.20106115573634592 ], [ 0.15613390701766192, 0.0108057273733007, -0.2521032815737971 ], [ 2.6118395188089782, -1.4365547404893027, 1.6384739408303286 ], [ 3.154960287971271, 0.121327529183666, -0.33901637183298167 ], [ -0.506424765720648, 0.18885582630396222, 1.8608361401816629 ], [ -3.303926998606544, 0.15514862473155108, -0.18868217143124325 ], ... ],
    	[ [ 0.014799214021449668, 0.2733324215267287, 1.0317318083780411 ], [ -0.23911229446188131, -4.240358122575321, 0.35541683443015665 ], [ -2.2790639762060914, -0.011641864938541892, 0.011436166538685805 ], [ -0.6994729328700823, 0.12653232724811542, 0.1620491956970833 ], [ 0.5446380171375573, 1.2185226962847215, -3.356886631813278 ], [ -3.4337136285724648, -0.4186357578905258, 0.025773039125913257 ], [ -0.24708543565541904, -0.6191084080943424, 0.08118691351712896 ], [ 1.360369181702106, 3.3368900739154554, -0.5097839815048301 ], ... ],
    	[ [ -0.2180649592740849, -0.5449480913525683, 0.5894634457638214 ], [ 1.1674565863131963, -1.4162008220362023, 0.010898942911743774 ], [ -0.11864917516002577, 2.9911220560327503, 0.34607973319630336 ], [ -0.19025713999641602, 0.15334293007227215, -0.7530624252147369 ], [ 0.08083001029993271, 0.013078715948704998, 0.9073502931116921 ], [ -0.17136551773615652, 0.16701194621484708, -0.4866358969585033 ], [ -1.1823050242110489, 1.775507564139697, -0.00213719357736965 ], [ -0.3370368872114297, -0.09887871263255994, 0.7560061898506197 ], ... ],
    	[ [ -3.4072448807733635, -0.9060307511848795, -1.5553440752712835 ], [ 0.30313486522449545, 0.8910898850701656, -2.4535127738377986 ], [ -4.515413769849472, 1.2914947834386603, 0.01940340941866542 ], [ 0.28931249461403125, -0.09215703170139229, 0.1230736423927646 ], [ 0.11815914507660606, -0.2350214702671629, -1.181189257692094 ], [ 0.33595406746143125, -0.08745322794215281, 0.2538510094483087 ], [ -0.059923149022558575, -0.046185069168521165, -0.8333669657295834 ], [ -0.5672912429240398, -0.4608387747827603, -0.07729539582826192 ], ... ],
    	[ [ -0.15821789112135523, -0.03201774079965577, -0.07279060364143776 ], [ 0.05941140319352598, 0.1712545937976942, -0.2970878231606954 ], [ -0.16318311841941968, 1.0914211353179082, 0.4203976225571701 ], [ 0.16722522509953092, 0.04385235140382118, 1.1543992524405875 ], [ -1.5115966916788623, -0.21330727384605494, 0.6787835052324205 ], [ 0.4388011654254634, 0.0145026352735248, -0.3420128647175179 ], [ -0.4383839506758006, 0.9755415621384791, 0.586325135817365 ], [ 0.21280187866703992, 0.13786269984201024, -0.023018451598100568 ], ... ],
    	...
    ]

TrainingTester.java:432 executed in 0.14 seconds (0.000 gc):

    return TestUtil.compare(title + " vs Iteration", runs);
Logging
Plotting range=[0.0, 0.18557788933622762], [2.0, 0.6006833225379824]; valueStats=DoubleSummaryStatistics{count=3, sum=9.507807, min=1.533126, average=3.169269, max=3.987340}
Only 1 points for GD
Only 1 points for CjGD
Only 1 points for LBFGS

Returns

Result

TrainingTester.java:435 executed in 0.02 seconds (0.000 gc):

    return TestUtil.compareTime(title + " vs Time", runs);
Logging
Plotting range=[-1.0, 0.18557788933622762], [1.0, 0.6006833225379824]; valueStats=DoubleSummaryStatistics{count=3, sum=9.507807, min=1.533126, average=3.169269, max=3.987340}
Only 1 points for GD
Only 1 points for CjGD
Only 1 points for LBFGS

Returns

Result

Results

TrainingTester.java:255 executed in 0.00 seconds (0.000 gc):

    return grid(inputLearning, modelLearning, completeLearning);

Returns

Result

TrainingTester.java:258 executed in 0.00 seconds (0.000 gc):

    return new ComponentResult(null == inputLearning ? null : inputLearning.value,
        null == modelLearning ? null : modelLearning.value, null == completeLearning ? null : completeLearning.value);

Returns

    {"input":{ "LBFGS": { "type": "NonConverged", "value": 3.9873404871414286 }, "CjGD": { "type": "NonConverged", "value": 1.5331261430129999 }, "GD": { "type": "NonConverged", "value": 3.9873404871414286 } }, "model":null, "complete":null}

LayerTests.java:425 executed in 0.00 seconds (0.000 gc):

    throwException(exceptions.addRef());

Results

detailsresult
{"input":{ "LBFGS": { "type": "NonConverged", "value": 3.9873404871414286 }, "CjGD": { "type": "NonConverged", "value": 1.5331261430129999 }, "GD": { "type": "NonConverged", "value": 3.9873404871414286 } }, "model":null, "complete":null}OK
  {
    "result": "OK",
    "performance": {
      "execution_time": "222.576",
      "gc_time": "31.281"
    },
    "created_on": 1586737494257,
    "file_name": "trainingTest",
    "report": {
      "simpleName": "Double3",
      "canonicalName": "com.simiacryptus.mindseye.layers.cudnn.NProductLayerTest.Double3",
      "link": "https://github.com/SimiaCryptus/mindseye-cudnn/tree/59d5b3318556370acb2d83ee6ec123ce0fc6974f/src/test/java/com/simiacryptus/mindseye/layers/cudnn/NProductLayerTest.java",
      "javaDoc": ""
    },
    "training_analysis": {
      "input": {
        "LBFGS": {
          "type": "NonConverged",
          "value": 3.9873404871414286
        },
        "CjGD": {
          "type": "NonConverged",
          "value": 1.5331261430129999
        },
        "GD": {
          "type": "NonConverged",
          "value": 3.9873404871414286
        }
      }
    },
    "archive": "s3://code.simiacrypt.us/tests/com/simiacryptus/mindseye/layers/cudnn/NProductLayer/Double3/trainingTest/202004132454",
    "id": "8de4a331-5297-44cc-a2e7-c2a3954192fe",
    "report_type": "Components",
    "display_name": "Comparative Training",
    "target": {
      "simpleName": "NProductLayer",
      "canonicalName": "com.simiacryptus.mindseye.layers.cudnn.NProductLayer",
      "link": "https://github.com/SimiaCryptus/mindseye-cudnn/tree/59d5b3318556370acb2d83ee6ec123ce0fc6974f/src/main/java/com/simiacryptus/mindseye/layers/cudnn/NProductLayer.java",
      "javaDoc": ""
    }
  }