1. Test Modules
  2. Training Characteristics
    1. Input Learning
      1. Gradient Descent
      2. Conjugate Gradient Descent
      3. Limited-Memory BFGS
    2. Results
  3. Results

Subreport: Logs for com.simiacryptus.ref.lang.ReferenceCountingBase

Test Modules

Using Seed 6224807018825536512

Training Characteristics

Input Learning

In this test, we use a network to learn this target input, given its pre-evaluated output:

TrainingTester.java:332 executed in 0.04 seconds (0.000 gc):

    return RefArrays.stream(RefUtil.addRef(input_target)).flatMap(RefArrays::stream).map(x -> {
      try {
        return x.prettyPrint();
      } finally {
        x.freeRef();
      }
    }).reduce((a, b) -> a + "\n" + b).orElse("");

Returns

    [
    	[ [ -1.716, -0.456, -0.02 ], [ 0.776, -1.072, 1.74 ], [ 0.168, 0.208, 1.192 ], [ 0.248, 1.18, 0.24 ], [ -1.352, -0.752, 1.932 ], [ 1.18, -0.624, 1.732 ], [ 0.88, -1.016, 1.94 ], [ -1.632, -0.916, 0.944 ], ... ],
    	[ [ 1.996, -0.816, 0.252 ], [ -0.132, -0.144, -1.784 ], [ -1.588, -1.432, 0.464 ], [ -0.524, -0.48, -0.244 ], [ 1.296, 1.344, 1.072 ], [ -0.112, -0.784, 1.476 ], [ -0.528, 0.98, 0.688 ], [ -0.312, 0.872, -1.04 ], ... ],
    	[ [ -1.796, 1.564, 0.824 ], [ -0.504, 0.02, 1.952 ], [ -0.052, 1.72, 1.02 ], [ -0.144, -1.604, -0.064 ], [ 0.704, 0.228, -0.916 ], [ 0.392, -1.112, 1.172 ], [ 0.38, -1.748, -1.768 ], [ 0.308, 0.304, -1.932 ], ... ],
    	[ [ 0.424, -1.012, -0.54 ], [ 0.88, 0.236, 0.152 ], [ 0.048, -1.5, 1.8 ], [ 0.188, -1.62, -0.164 ], [ 1.22, 0.276, -0.292 ], [ -0.304, 0.044, -1.82 ], [ 1.484, 0.148, -0.552 ], [ 1.54, 1.012, 0.492 ], ... ],
    	[ [ -0.872, -1.536, 0.86 ], [ 0.848, 1.716, -0.64 ], [ 1.884, -1.332, 0.44 ], [ 1.764, -0.16, -1.44 ], [ 1.3, 0.688, 0.632 ], [ -0.856, 0.32, 0.528 ], [ -0.316, 0.748, 1.948 ], [ 0.336, -0.576, -1.948 ], ... ],
    	[ [ -0.516, 1.996, -1.976 ], [ -1.62, 0.052, 0.552 ], [ 1.272, 0.892, 0.772 ], [ 0.888, 1.008, 1.56 ], [ -0.56, -1.856, 1.58 ], [ 1.756, 1.964, 0.516 ], [ -1.344, 1.196, -1.172 ], [ 1.524, 1.5, 0.536 ], ... ],
    	[ [ 1.932, -0.04, 0.876 ], [ -1.332, 1.736, 1.452 ], [ 1.524, 0.576, 0.648 ], [ -0.2, 1.352, -0.848 ], [ 0.012, 1.008, 1.644 ], [ 2.0, 0.864, -1.892 ], [ -0.82, 0.748, 0.184 ], [ -1.272, 1.22, 1.316 ], ... ],
    	[ [ -0.124, 0.548, -1.38 ], [ -0.116, 1.852, -1.204 ], [ 1.184, -0.044, -0.436 ], [ -1.2, 0.708, 0.524 ], [ 1.268, -1.58, 1.232 ], [ 0.204, -0.216, 1.232 ], [ -1.728, -1.732, 0.26 ], [ 1.268, -1.5, 0.608 ], ... ],
    	...
    ]
    [
    	[ [ 0.02, -0.956, -0.544 ], [ 0.072, 1.292, -1.576 ], [ -0.012, 0.488, 0.428 ], [ -1.304, 1.168, 1.316 ], [ 0.304, 0.128, 0.676 ], [ 1.12, 0.196, -0.524 ], [ -0.304, -0.328, 0.984 ], [ -1.68, 0.572, 1.216 ], ... ],
    	[ [ 1.312, 0.096, 0.372 ], [ -1.372, -0.808, -0.396 ], [ 0.492, -0.412, -0.876 ], [ 1.692, -1.636, -0.948 ], [ -1.076, -0.292, -1.272 ], [ -1.756, 1.212, -1.596 ], [ 0.524, 0.46, 0.496 ], [ 1.028, 0.876, -1.8 ], ... ],
    	[ [ 1.356, -1.868, 0.224 ], [ 1.216, 1.484, -1.356 ], [ 1.364, -0.54, 0.568 ], [ -0.404, 1.616, -0.108 ], [ 0.356, 0.944, -0.092 ], [ -1.52, -1.532, 1.5 ], [ 1.384, -0.264, -0.832 ], [ -0.288, 0.468, 1.328 ], ... ],
    	[ [ 1.684, 0.044, -1.408 ], [ 1.152, -1.848, -1.152 ], [ -0.06, 0.572, 1.98 ], [ -0.324, -1.892, -0.976 ], [ -1.516, 0.952, 0.788 ], [ -0.708, -0.128, -1.944 ], [ -1.808, 0.052, -0.832 ], [ 1.452, -0.428, 1.664 ], ... ],
    	[ [ 1.996, 0.688, 1.904 ], [ -0.74, -1.348, -0.712 ], [ -0.268, 0.136, -1.828 ], [ -0.968, -1.964, -0.12 ], [ -0.72, -1.524, 1.864 ], [ 1.72, 1.404, 1.396 ], [ 0.764, 0.98, -1.7 ], [ -1.972, -1.452, -1.42 ], ... ],
    	[ [ -0.176, 0.152, -0.832 ], [ 0.896, -1.876, -0.076 ], [ -0.02, -1.384, -0.008 ], [ -1.628, -0.932, -1.26 ], [ -1.568, -0.824, 0.764 ], [ -0.488, 1.232, -1.508 ], [ -0.14, -1.58, 0.856 ], [ 1.112, -1.432, -0.18 ], ... ],
    	[ [ 1.988, 0.076, -0.412 ], [ 0.44, -1.216, -1.004 ], [ -1.68, -0.608, -0.48 ], [ 1.48, -0.376, -1.292 ], [ -0.376, -1.304, 1.132 ], [ 1.244, -1.956, -0.936 ], [ 0.3, 0.856, -1.072 ], [ -0.424, 1.36, -0.832 ], ... ],
    	[ [ 1.248, 1.104, -1.136 ], [ -0.752, 0.48, -0.1 ], [ -1.792, 0.256, -0.784 ], [ -1.708, -0.256, -1.74 ], [ -0.752, 1.088, 1.012 ], [ -1.096, -1.668, 0.6 ], [ -1.036, 1.552, -0.716 ], [ -1.236, 0.556, -0.876 ], ... ],
    	...
    ]
    [
    	[ [ -0.008, 1.228, 1.224 ], [ 0.004, 1.216, 1.844 ], [ 1.872, -0.98, -0.748 ], [ 0.088, 0.304, 0.712 ], [ 1.428, -0.592, 1.584 ], [ -1.416, -1.372, -0.36 ], [ 1.496, -0.648, 0.244 ], [ -1.844, 0.324, -1.076 ], ... ],
    	[ [ 0.868, 1.448, 0.468 ], [ -0.516, -0.808, -0.904 ], [ 1.372, -1.324, 1.352 ], [ -1.18, 1.46, 1.444 ], [ 1.836, -0.312, -0.54 ], [ 1.124, -0.532, 0.56 ], [ 2.0, 1.34, 0.48 ], [ 1.044, -1.604, 1.4 ], ... ],
    	[ [ 1.972, -1.676, 1.76 ], [ -0.176, 0.804, 0.86 ], [ 1.848, 0.648, 1.232 ], [ -1.76, -0.204, 0.904 ], [ -0.328, -1.38, -1.492 ], [ -1.86, 1.796, 0.412 ], [ 0.036, -0.364, -0.652 ], [ 1.152, -0.84, 1.736 ], ... ],
    	[ [ -1.916, 1.688, 0.308 ], [ -1.0, -0.7, 0.74 ], [ 1.46, 1.12, -1.328 ], [ 1.424, -1.052, -1.068 ], [ -0.652, -1.792, -0.68 ], [ -0.336, 0.572, 1.792 ], [ -1.996, -0.876, -1.4 ], [ -0.128, 1.86, -1.444 ], ... ],
    	[ [ -1.972, -0.312, -1.988 ], [ -1.86, -0.012, -0.496 ], [ 1.848, 0.716, -0.856 ], [ 0.196, -1.844, -1.432 ], [ -1.768, 1.832, 0.66 ], [ 1.896, 0.836, 0.844 ], [ -1.196, 0.816, -0.264 ], [ -1.144, -1.16, -0.2 ], ... ],
    	[ [ -1.832, 0.168, -0.392 ], [ 1.768, -1.84, -1.92 ], [ -0.024, -1.132, 1.712 ], [ 0.744, -1.736, -0.552 ], [ -1.56, 1.664, -0.664 ], [ -1.628, -0.672, 1.348 ], [ -0.168, -0.616, 0.172 ], [ 0.664, -0.652, 1.924 ], ... ],
    	[ [ 1.62, -1.556, -0.956 ], [ 0.584, 1.612, 0.272 ], [ -0.492, 1.468, -0.404 ], [ -1.052, -0.388, 1.288 ], [ 1.52, -1.5, -1.08 ], [ 0.584, 0.292, 0.444 ], [ 0.172, 0.62, 1.948 ], [ -1.176, 0.12, -0.292 ], ... ],
    	[ [ 1.856, -1.248, 0.54 ], [ -1.548, 0.796, -0.876 ], [ 1.708, -0.196, -0.196 ], [ 0.072, -1.964, -0.472 ], [ 1.192, 0.24, 1.296 ], [ -0.22, 1.184, -0.7 ], [ 1.024, -1.4, -0.084 ], [ -0.248, 0.172, -1.236 ], ... ],
    	...
    ]
    [
    	[ [ 1.408, 0.372, 0.444 ], [ -0.488, 0.216, -0.448 ], [ 1.304, -1.536, 0.112 ], [ 0.596, -0.988, 1.7 ], [ 1.048, -0.424, 1.444 ], [ -1.372, -0.988, 1.848 ], [ 0.084, -1.9, 1.164 ], [ 1.112, 1.18, 0.868 ], ... ],
    	[ [ 1.712, -1.828, 0.32 ], [ 0.684, 0.324, -0.78 ], [ 0.524, 0.256, -1.032 ], [ -0.336, -0.732, -0.932 ], [ 0.476, 1.264, 0.504 ], [ 0.82, -1.332, -1.196 ], [ -0.632, -1.316, 1.5 ], [ 1.172, -1.896, -1.572 ], ... ],
    	[ [ 0.34, 0.164, -0.128 ], [ -0.28, 0.884, 1.924 ], [ -0.704, 1.004, 0.088 ], [ -1.924, -1.464, -0.092 ], [ 0.924, 1.756, -1.132 ], [ 1.896, -1.996, -0.616 ], [ 1.46, 1.42, 1.188 ], [ 1.824, -1.624, 0.572 ], ... ],
    	[ [ 0.232, 0.872, 1.72 ], [ 1.408, -0.652, -0.28 ], [ -1.248, -1.956, 0.66 ], [ 0.368, 0.832, -0.332 ], [ -1.632, 1.988, 0.444 ], [ 1.356, 0.476, -0.784 ], [ -0.368, -1.524, 1.296 ], [ 1.256, -0.272, 0.548 ], ... ],
    	[ [ 1.124, -0.676, -0.424 ], [ 0.508, 1.764, -1.852 ], [ 0.82, -1.524, -1.712 ], [ -0.484, 0.412, 0.112 ], [ -1.42, 0.86, 0.084 ], [ -0.112, 0.144, -0.288 ], [ 1.328, -0.048, -1.68 ], [ -1.12, -1.288, 0.54 ], ... ],
    	[ [ 0.328, -0.752, 0.868 ], [ -0.644, -0.432, 1.176 ], [ 0.688, -0.264, 0.024 ], [ -0.38, -1.38, -0.244 ], [ -0.072, -0.468, 0.696 ], [ -1.336, -0.772, -1.416 ], [ -1.024, -0.392, -1.592 ], [ -0.468, -1.652, 0.212 ], ... ],
    	[ [ 0.796, 1.216, 0.636 ], [ -1.72, 1.984, 0.056 ], [ -0.276, -0.38, -1.984 ], [ 1.668, -1.996, -0.964 ], [ 1.568, -0.864, 0.612 ], [ 1.108, -1.944, -0.088 ], [ 0.34, 1.376, 1.368 ], [ -0.396, -0.78, -0.964 ], ... ],
    	[ [ 0.76, -0.208, -0.328 ], [ -0.348, 1.456, -1.96 ], [ 1.624, -1.436, -0.248 ], [ -1.168, 1.936, 0.964 ], [ 0.936, 0.776, -0.468 ], [ -0.676, -1.316, 0.236 ], [ 1.952, -0.524, -1.324 ], [ 1.404, 1.264, 1.132 ], ... ],
    	...
    ]
    [
    	[ [ -1.904, -1.892, -0.468 ], [ 0.72, -0.272, -0.84 ], [ -1.728, -1.716, -1.98 ], [ -0.584, -1.2, -0.928 ], [ 1.072, -0.6, -1.492 ], [ 0.392, -0.116, -0.008 ], [ -0.156, -1.12, -1.188 ], [ 0.5, -0.992, 1.128 ], ... ],
    	[ [ 0.052, -0.568, 1.896 ], [ 1.292, 0.952, -1.752 ], [ -0.104, -1.884, -1.528 ], [ -0.56, -1.944, 0.616 ], [ 1.36, 1.284, -1.144 ], [ -0.984, 0.48, -0.728 ], [ -0.64, -0.296, 1.44 ], [ 1.3, 1.196, -1.42 ], ... ],
    	[ [ -1.108, 0.696, 1.564 ], [ 0.468, -1.084, 1.884 ], [ 0.372, 1.04, -0.236 ], [ 0.944, -1.056, -0.456 ], [ -1.5, -0.416, 0.58 ], [ 1.036, -1.776, -0.888 ], [ 1.8, -0.572, -0.296 ], [ -0.124, -1.092, -0.8 ], ... ],
    	[ [ -1.128, -0.312, -1.26 ], [ 1.9, 2.0, -0.256 ], [ -1.096, -1.204, 0.428 ], [ 0.696, 1.892, 1.884 ], [ 1.408, 0.644, -0.2 ], [ -1.936, -1.3, -0.948 ], [ -0.864, -1.176, 0.02 ], [ -1.28, 1.396, -1.784 ], ... ],
    	[ [ 1.696, -1.128, -1.216 ], [ 1.044, -0.372, 1.632 ], [ 0.076, 0.604, 0.376 ], [ -1.74, -1.012, -1.228 ], [ 1.96, -1.82, 0.192 ], [ -0.636, 1.284, -1.628 ], [ -1.728, 1.652, 1.312 ], [ -1.92, -0.136, 0.06 ], ... ],
    	[ [ -1.188, 1.22, -0.4 ], [ -1.18, -0.292, -1.968 ], [ -0.196, -0.6, -0.076 ], [ 1.016, -0.196, 0.236 ], [ -1.804, 0.072, 0.424 ], [ -0.06, 1.09

...skipping 1013 bytes...

    , 0.808, -1.816 ], [ -0.948, -1.548, 1.076 ], [ -1.712, -0.072, 0.768 ], [ 0.868, 0.968, -1.724 ], [ -1.248, -0.58, -1.036 ], [ 0.16, 0.864, 0.752 ], ... ],
    	[ [ 1.748, -1.628, -0.46 ], [ 1.308, -0.728, -1.844 ], [ 1.548, 1.284, -1.924 ], [ -0.292, 0.928, -0.504 ], [ 0.62, -0.776, 0.612 ], [ 1.456, 1.604, 1.68 ], [ -0.016, 1.632, 0.288 ], [ 0.904, 1.852, 1.184 ], ... ],
    	[ [ 0.436, -1.46, -1.904 ], [ -1.788, -0.012, -1.824 ], [ -0.34, -0.36, -1.732 ], [ -0.964, 0.212, 0.884 ], [ 0.636, 1.016, 1.212 ], [ 1.868, 0.652, -1.6 ], [ -0.832, -0.48, -1.928 ], [ 0.508, 1.388, 0.464 ], ... ],
    	[ [ 1.924, -0.984, 0.952 ], [ 0.58, -1.576, -1.072 ], [ 1.58, 0.2, 1.712 ], [ -1.572, 0.42, 0.148 ], [ 1.576, 0.54, 0.256 ], [ 1.552, -1.9, 1.264 ], [ -0.884, -0.032, 0.084 ], [ 1.656, 0.816, -0.604 ], ... ],
    	[ [ -1.596, 1.076, -0.088 ], [ -0.028, 0.28, -1.744 ], [ -0.06, 1.524, -0.832 ], [ -1.696, -0.568, -1.912 ], [ -0.304, -0.016, -0.94 ], [ 0.652, -1.556, 1.944 ], [ -0.8, -0.816, 0.988 ], [ -1.664, 1.196, 1.164 ], ... ],
    	[ [ -0.688, 0.976, -1.924 ], [ 1.304, 0.284, -1.96 ], [ -0.708, 0.7, -1.208 ], [ 1.188, 1.324, -1.944 ], [ 1.02, 0.5, -0.368 ], [ 1.776, 0.664, -0.356 ], [ 0.692, -0.852, 0.828 ], [ -1.396, 1.8, -0.984 ], ... ],
    	...
    ]
    [
    	[ [ -1.876, -0.508, -1.868 ], [ -0.128, -0.94, 0.376 ], [ 1.728, -0.976, -1.06 ], [ -1.544, -0.544, -1.756 ], [ 0.78, 0.596, 0.496 ], [ -1.748, -1.44, -1.612 ], [ 1.552, 1.428, -0.512 ], [ 1.06, 0.7, -0.532 ], ... ],
    	[ [ -1.472, -0.776, 0.496 ], [ 1.484, 1.656, -0.064 ], [ -0.136, 1.34, 0.416 ], [ -0.004, 0.692, 1.324 ], [ -0.812, 0.236, 0.408 ], [ 0.292, -0.98, 0.012 ], [ -1.676, 1.444, 0.716 ], [ 1.188, -1.76, 0.148 ], ... ],
    	[ [ 1.604, -1.864, 1.508 ], [ -0.936, 1.348, -0.252 ], [ -0.912, 1.748, -1.536 ], [ -0.404, 0.584, -0.448 ], [ 0.964, -1.276, -1.9 ], [ 1.304, 0.308, 0.236 ], [ 0.388, -1.752, -1.604 ], [ 0.512, -0.256, -1.504 ], ... ],
    	[ [ -1.34, 0.252, -1.672 ], [ 1.216, 1.012, -1.592 ], [ 1.336, -0.632, -0.532 ], [ -1.008, 0.792, -1.476 ], [ -0.672, 0.208, 0.924 ], [ 0.772, 1.664, 0.74 ], [ -0.26, 0.864, 0.12 ], [ -1.924, 1.036, 1.908 ], ... ],
    	[ [ -1.476, -0.184, -0.136 ], [ -0.912, -0.276, 1.1 ], [ 1.496, -1.508, -1.228 ], [ -0.56, 0.656, -0.372 ], [ 0.9, 0.952, -0.876 ], [ -1.62, 0.348, -1.884 ], [ 0.56, -1.292, 0.096 ], [ -1.232, -1.336, 0.592 ], ... ],
    	[ [ -0.636, 1.348, -0.296 ], [ 1.224, 0.724, -0.444 ], [ 1.348, 1.988, 0.728 ], [ 0.652, 0.568, 1.664 ], [ -0.232, -1.564, 1.74 ], [ 0.692, -1.664, 1.16 ], [ 1.524, 0.376, 1.792 ], [ -0.912, -1.936, 0.176 ], ... ],
    	[ [ -0.32, 1.064, 0.3 ], [ 1.568, 0.952, 1.88 ], [ 1.552, 1.724, 0.652 ], [ -0.78, -0.796, -0.796 ], [ 0.832, 1.436, 0.58 ], [ -0.796, 0.312, 1.156 ], [ 0.692, -0.528, -0.552 ], [ -0.164, 1.476, -0.596 ], ... ],
    	[ [ 1.124, -0.5, -1.38 ], [ -1.112, 0.036, -0.264 ], [ -1.732, -0.752, -0.724 ], [ 0.524, -0.804, 0.58 ], [ 1.18, 0.4, 1.04 ], [ 1.684, 1.228, 1.532 ], [ -1.496, -0.948, 0.476 ], [ 1.972, -0.008, -1.3 ], ... ],
    	...
    ]
    [
    	[ [ -1.708, 0.14, -1.264 ], [ -0.512, 1.676, 1.412 ], [ -1.564, 1.124, -0.932 ], [ -1.78, 0.192, -1.32 ], [ -0.708, 0.572, -0.172 ], [ -1.672, -1.736, 1.672 ], [ -1.06, -0.528, -1.572 ], [ 0.036, 0.256, 0.584 ], ... ],
    	[ [ -0.188, -0.54, 1.624 ], [ -1.064, 0.868, -0.884 ], [ 0.824, 0.32, -1.824 ], [ -0.464, 1.344, -0.804 ], [ -1.34, 1.252, 1.64 ], [ 0.804, 1.704, 1.336 ], [ 0.592, 0.656, 0.552 ], [ -0.572, 0.912, -1.444 ], ... ],
    	[ [ 0.472, -1.712, 1.968 ], [ -0.708, -1.98, -0.448 ], [ 1.252, -1.604, -0.672 ], [ -0.612, 0.604, -0.084 ], [ -1.14, 1.128, -1.188 ], [ 1.556, 0.568, -0.168 ], [ -0.18, -0.02, 0.896 ], [ -1.684, -1.5, 1.336 ], ... ],
    	[ [ -0.88, 0.82, 1.324 ], [ -1.048, 0.796, -1.58 ], [ -0.176, -1.84, 1.12 ], [ -0.716, 1.36, 0.5 ], [ -0.516, 1.12, -0.016 ], [ 0.196, -0.688, -1.144 ], [ 1.7, -1.72, -0.768 ], [ -0.044, -1.208, -0.708 ], ... ],
    	[ [ -0.136, 1.864, -0.408 ], [ 0.444, -0.972, 1.672 ], [ -1.244, 1.072, -0.712 ], [ 0.432, -0.332, 1.312 ], [ 1.504, 0.82, 0.224 ], [ 0.448, 1.112, 1.06 ], [ -1.148, -0.58, 0.984 ], [ 0.348, 1.108, -0.884 ], ... ],
    	[ [ -0.392, -1.82, -1.836 ], [ -1.544, 1.676, 0.0 ], [ -0.612, -0.36, -0.18 ], [ -1.288, -0.992, 0.768 ], [ -1.948, -1.744, 0.172 ], [ -1.688, -0.324, -0.02 ], [ 0.396, -0.148, 0.548 ], [ 0.204, -0.236, -0.196 ], ... ],
    	[ [ -1.344, -0.392, 1.392 ], [ 1.96, 1.952, 1.26 ], [ 0.288, 1.348, 1.32 ], [ -1.408, 0.032, 0.248 ], [ 1.964, -0.492, 0.964 ], [ -0.628, 1.58, 0.116 ], [ 1.132, -0.112, -1.956 ], [ -0.92, -0.036, 0.532 ], ... ],
    	[ [ 0.8, 1.78, 1.08 ], [ -1.528, -0.58, 1.472 ], [ 1.248, 0.016, 1.468 ], [ -1.112, 1.604, 0.376 ], [ 0.46, 0.468, -1.484 ], [ 0.124, 0.732, 1.796 ], [ 0.76, 0.852, -1.656 ], [ 0.708, 1.136, 1.244 ], ... ],
    	...
    ]
    [
    	[ [ -1.74, 0.468, -0.644 ], [ 1.404, 1.56, 0.024 ], [ 0.448, -0.528, -1.616 ], [ 1.868, -0.92, 0.444 ], [ -1.164, -0.504, 0.1 ], [ -0.092, -0.424, 0.972 ], [ -1.324, 0.3, 1.568 ], [ 0.44, 0.852, -1.6 ], ... ],
    	[ [ -0.14, 0.056, -0.2 ], [ -1.592, 1.652, -0.852 ], [ 1.488, 0.94, 0.148 ], [ 0.232, -1.948, 0.256 ], [ -0.796, 1.204, 1.22 ], [ -1.728, -0.64, -1.652 ], [ -0.372, -1.416, -0.264 ], [ 0.2, 1.12, -1.832 ], ... ],
    	[ [ 0.148, 0.804, 0.356 ], [ -1.164, -1.584, -0.908 ], [ 1.108, 1.092, -1.564 ], [ -0.836, -1.54, -1.26 ], [ 0.336, -0.332, -1.584 ], [ -0.912, -0.26, 1.64 ], [ 1.684, -1.536, 0.584 ], [ 0.608, 0.488, 0.356 ], ... ],
    	[ [ 0.12, -1.392, -0.132 ], [ 0.568, 1.488, 0.368 ], [ 0.896, -1.492, 0.808 ], [ 1.612, 0.028, 0.608 ], [ -1.3, 0.232, -1.516 ], [ -1.328, 1.84, -1.76 ], [ -1.16, 0.496, -1.876 ], [ 0.236, -1.944, -1.608 ], ... ],
    	[ [ -0.672, 1.756, -0.44 ], [ 1.088, 0.732, 1.876 ], [ -1.516, 0.348, 1.272 ], [ 0.472, -0.132, 0.264 ], [ -0.688, -0.988, -0.392 ], [ -1.568, -0.02, -1.732 ], [ -1.548, 0.24, 0.544 ], [ 1.52, 1.324, -0.96 ], ... ],
    	[ [ 0.012, 1.52, -0.824 ], [ 0.612, 0.092, 1.468 ], [ 0.032, 1.972, -0.456 ], [ -0.312, -0.032, -0.268 ], [ -1.42, 0.5, 1.78 ], [ -1.728, 0.12, 1.316 ], [ -0.956, -0.664, 1.696 ], [ -0.556, -1.16, 1.044 ], ... ],
    	[ [ -0.444, 1.952, -0.38 ], [ 0.68, -1.58, 1.316 ], [ 0.364, -1.916, 1.14 ], [ 1.856, -0.876, 1.316 ], [ 0.116, -0.292, -0.428 ], [ -0.468, 0.384, 0.236 ], [ -1.056, -0.288, 0.856 ], [ 0.596, 0.896, -0.932 ], ... ],
    	[ [ -1.248, -0.152, 0.816 ], [ 1.988, 0.108, -1.808 ], [ -0.624, -0.096, -0.196 ], [ 1.496, 1.064, -0.972 ], [ 1.556, -1.204, 1.968 ], [ 1.336, -0.852, 0.864 ], [ 0.96, 0.264, -1.68 ], [ 0.656, 0.124, -0.456 ], ... ],
    	...
    ]
    [
    	[ [ -1.7, -0.364, 0.66 ], [ -1.556, -1.124, -0.104 ], [ 1.352, -1.196, -0.184 ], [ -0.908, 1.764, 0.52 ], [ 1.344, -1.18, 1.208 ], [ 1.484, -0.188, 1.452 ], [ -1.696, -1.256, -0.02 ], [ 1.028, -0.888, -1.892 ], ... ],
    	[ [ 0.916, -0.22, 1.284 ], [ -0.856, -0.38, 1.02 ], [ 0.436, 1.876, -1.74 ], [ 0.268, 1.376, -1.272 ], [ -1.896, 1.828, -0.1 ], [ -1.532, -0.616, -0.9 ], [ 1.272, -1.396, 0.38 ], [ 0.772, 1.536, -0.972 ], ... ],
    	[ [ 1.456, 0.064, 1.596 ], [ 1.096, 0.24, 1.184 ], [ 0.352, -1.672, -1.368 ], [ 1.656, -1.236, 0.644 ], [ 1.804, 0.312, -0.956 ], [ -0.544, -1.168, 0.86 ], [ 0.784, 1.148, 0.984 ], [ -0.456, 1.512, 1.572 ], ... ],
    	[ [ 0.512, 1.412, 1.384 ], [ -0.316, 1.876, -1.088 ], [ -1.252, 0.244, 1.18 ], [ 0.136, -1.132, 0.936 ], [ -0.96, 0.456, 0.284 ], [ 0.376, -1.28, -1.144 ], [ -1.52, -0.696, 1.276 ], [ 0.54, 0.076, -0.844 ], ... ],
    	[ [ 0.6, -1.412, -1.404 ], [ 0.604, 0.86, 1.792 ], [ -1.148, -1.32, 0.992 ], [ -0.272, -0.452, 1.74 ], [ -0.724, -1.252, -1.408 ], [ -0.372, 0.584, 1.272 ], [ 1.592, -0.172, 1.236 ], [ -0.62, -1.264, -0.104 ], ... ],
    	[ [ 1.656, -0.628, -0.184 ], [ -0.504, -0.016, -1.684 ], [ 1.132, 1.724, -1.136 ], [ -0.032, 1.728, -0.84 ], [ -0.156, -0.888, -0.196 ], [ -0.38, -0.064, 0.62 ], [ 0.288, -1.292, 1.864 ], [ -1.304, 1.884, -1.476 ], ... ],
    	[ [ 0.096, 0.416, 0.224 ], [ -1.632, 1.364, 1.308 ], [ -0.676, 0.184, -1.44 ], [ 1.788, -1.596, -0.796 ], [ 0.748, -1.224, 1.216 ], [ 0.628, -0.176, 0.388 ], [ 0.128, 1.152, -0.784 ], [ -0.972, -1.892, 0.4 ], ... ],
    	[ [ -0.888, 1.3, 0.324 ], [ 1.896, -1.884, 1.812 ], [ 0.348, 0.476, 0.312 ], [ 0.28, -1.648, -1.124 ], [ -0.308, 0.3, 0.62 ], [ -1.504, -1.832, -1.156 ], [ -1.528, -0.76, -1.94 ], [ 0.948, -1.252, 0.852 ], ... ],
    	...
    ]

Gradient Descent

First, we train using the basic gradient descent method with weak line search conditions.

TrainingTester.java:480 executed in 38.12 seconds (5.174 gc):

    IterativeTrainer iterativeTrainer = new IterativeTrainer(trainable.addRef());
    try {
      iterativeTrainer.setLineSearchFactory(label -> new ArmijoWolfeSearch());
      iterativeTrainer.setOrientation(new GradientDescent());
      iterativeTrainer.setMonitor(TrainingTester.getMonitor(history));
      iterativeTrainer.setTimeout(30, TimeUnit.SECONDS);
      iterativeTrainer.setMaxIterations(250);
      iterativeTrainer.setTerminateThreshold(0);
      return iterativeTrainer.run();
    } finally {
      iterativeTrainer.freeRef();
    }
Logging
Reset training subject: 3153081582046
Reset training subject: 3155079780863
Constructing line search parameters: GD
th(0)=3.555083177749154;dx=-2.4578176780647247E-6
New Minimum: 3.555083177749154 > 3.555077882161754
WOLFE (weak): th(2.154434690031884)=3.555077882161754; dx=-2.4578139101780364E-6 evalInputDelta=5.2955873997007075E-6
New Minimum: 3.555077882161754 > 3.5550725870959594
WOLFE (weak): th(4.308869380063768)=3.5550725870959594; dx=-2.4578101431147933E-6 evalInputDelta=1.0590653194419986E-5
New Minimum: 3.5550725870959594 > 3.5550514064719607
WOLFE (weak): th(12.926608140191302)=3.5550514064719607; dx=-2.457795074683336E-6 evalInputDelta=3.1771277193115566E-5
New Minimum: 3.5550514064719607 > 3.554956094864879
WOLFE (weak): th(51.70643256076521)=3.554956094864879; dx=-2.4577272673882216E-6 evalInputDelta=1.2708288427498715E-4
New Minimum: 3.554956094864879 > 3.554447811079023
WOLFE (weak): th(258.53216280382605)=3.554447811079023; dx=-2.457365653670841E-6 evalInputDelta=6.353666701306437E-4
New Minimum: 3.554447811079023 > 3.551272731143033
WOLFE (weak): th(1551.1929768229563)=3.551272731143033; dx=-2.4551065276960744E-6 evalInputDelta=0.003810446606120621
New Minimum: 3.551272731143033 > 3.528498200285031
WOLFE (weak): th(10858.350837760694)=3.528498200285031; dx=-2.4388895985106823E-6 evalInputDelta=0.026584977464122694
New Minimum: 3.528498200285031 > 3.348070231870996
WOLFE (weak): th(86866.80670208555)=3.348070231870996; dx=-2.3096227808675794E-6 evalInputDelta=0.20701294587815777
New Minimum: 3.348070231870996 > 2.0948370301768464
END: th(781801.26031877)=2.0948370301768464; dx=-1.3638607793296401E-6 evalInputDelta=1.4602461475723074
Fitness changed from 3.555083177749154 to 2.0948370301768464
Iteration 1 complete. Error: 2.0948370301768464 Total: 38.1095; Orientation: 2.0273; Line Search: 31.0984
Final threshold in iteration 1: 2.0948370301768464 (> 0.0) after 38.111s (< 30.000s)

Returns

    2.0948370301768464

This training run resulted in the following configuration:

TrainingTester.java:610 executed in 0.00 seconds (0.000 gc):

    RefList<double[]> state = network.state();
    assert state != null;
    String description = state.stream().map(RefArrays::toString).reduce((a, b) -> a + "\n" + b)
        .orElse("");
    state.freeRef();
    return description;

Returns

    

And regressed input:

TrainingTester.java:622 executed in 0.01 seconds (0.000 gc):

    return RefArrays.stream(RefUtil.addRef(data)).flatMap(x -> {
      return RefArrays.stream(x);
    }).limit(1).map(x -> {
      String temp_18_0015 = x.prettyPrint();
      x.freeRef();
      return temp_18_0015;
    }).reduce((a, b) -> a + "\n" + b).orElse("");

Returns

    [
    	[ [ 1.114128964040427, -0.5444743185721016, 0.13958164949374618 ], [ 0.4447117045678727, 0.18455992135499263, -1.1550894920329002 ], [ -0.9271515007035558, -1.187320247637766, 0.9887452640652843 ], [ 0.8669665974770782, -0.887545932201556, -1.0394657831541307 ], [ 1.3723107645736736, -0.4996707238189714, 1.2835513239854226 ], [ -0.06354509724714189, 0.15472853440306006, -1.3316698327768772 ], [ -1.4608389698375097, -1.8000272787010252, 0.8983333215521693 ], [ -1.9412936282867492, 1.817703839305624, -0.870767918558252 ], ... ],
    	[ [ 0.5723728493963148, 0.20265671447596129, 1.456064681531451 ], [ -1.5015348482147632, 1.4896838142683944, -1.005377150504087 ], [ 0.01711837147273479, -1.2499675762838705, 0.7030803567310698 ], [ -0.8673489014461553, -0.3576196776268742, -1.2982842211312189 ], [ 0.47339379815156835, -0.8503825787018818, -0.426758689612328 ], [ -0.9007361997196084, -0.4496619175020851, 1.7229088717582068 ], [ 0.653748530999037, 0.3211420593913464, -0.6790554878918231 ], [ 1.4904933264516436, -1.4274809445212848, 0.07741157242218612 ], ... ],
    	[ [ -0.594045937475725, 0.6476635534903448, 1.7133448600186956 ], [ 1.5316866839114827, -1.185720235718325, -0.19928009764320245 ], [ -0.9585922231287332, -0.599074943433102, -0.5060380784811219 ], [ -1.1810908736042474, 1.2349932327818296, -0.5806415487142423 ], [ 0.01939489153454381, 1.5895615303508153, 0.9120890875573453 ], [ 0.1669055463209996, -0.5739413246777545, 0.048755071941641775 ], [ 1.2069253120446293, -1.6582552357980958, 0.6128339241136334 ], [ -1.1920529309169692, -0.27230940334260556, 0.19336084540315035 ], ... ],
    	[ [ 0.40352818637939414, 1.8713875388389465, -0.7086986983396377 ], [ -0.49541077735902017, 1.0894282576488716, -0.6411361795112558 ], [ -0.7929664548508318, -0.79306575960962, -1.3214285404992885 ], [ 0.6416577402944803, 0.5852910267880125, -0.96415825376191 ], [ -1.014440496636258, 0.5958172114308171, -0.3562599054897978 ], [ -0.8819947658776174, 0.6347890183913449, 0.5386596431771185 ], [ 0.6712588012651791, -0.07540457944978796, 0.16247795323883432 ], [ 1.5255250731426337, 0.18156319800453563, 1.6365834680690292 ], ... ],
    	[ [ -0.876877830869984, 0.5112835937006717, 1.8142612136197012 ], [ -1.3675190546532798, 0.5617945152861406, 0.9044635562344749 ], [ -0.003985061930183623, 1.1498345965712573, -0.4407701264685614 ], [ 0.6370090859070607, 1.0911702025136407, -1.3750790298509872 ], [ -1.629407672350007, -1.1935112177631544, -0.5877997221959782 ], [ 0.2681810688600801, 0.9834478306000787, 0.6823636356930887 ], [ -0.26214466790291224, 0.24523278295368064, 1.9010258892498832 ], [ -0.9624256643106209, 1.2186716328529932, -1.6451584374101709 ], ... ],
    	[ [ 0.9328284842397648, -0.6749512421877027, 0.875356977550339 ], [ -1.229675212764938, 0.2893614488269637, -0.006377217921393874 ], [ -1.1383514925844562, 0.763839778759606, 1.1865820872371675 ], [ 1.3812923229645313, 1.294789276674353, -0.20580718765302886 ], [ -0.7929937016503773, -0.542682010348246, -1.234521195657477 ], [ 0.9329726525228831, 0.47504106235093624, -1.7748220167046338 ], [ -0.11103630026108915, 0.02957576243869034, 0.8032087634601474 ], [ -0.9526710173757258, 0.4163457196239635, -0.06781823882848143 ], ... ],
    	[ [ -1.0545116552027147, -1.4834423561711387, -1.8444691079078996 ], [ 0.015193876666020609, 0.9714524918605779, 0.023122793953330723 ], [ 0.8663756478538518, 0.4198598878909776, -0.8857044330172031 ], [ -1.3837793904658202, 1.4092806390175463, 0.8487431416017691 ], [ 1.1484565620299036, -1.5661154921019353, 0.957250446561368 ], [ -0.573154008677406, 1.351486607512428, 0.9168626445083783 ], [ -1.896492708533662, -0.10446134144711092, -0.5052220519413109 ], [ 0.01479674803092252, -1.4338356669672212, 0.7709112332354318 ], ... ],
    	[ [ -0.7740097652919271, -0.17251512797139795, -0.7305356869041453 ], [ 1.5956958670050962, -0.12331439031659296, 0.8586881329978256 ], [ 1.902619337582579, -0.4080874375142525, -0.3270293127945705 ], [ -0.7446910644231184, -1.7928108203231907, 1.6064961960118127 ], [ 1.70553490607535, -0.8739760025024054, -0.43651571972572445 ], [ 0.25228377983926153, -1.5178914596231883, -0.428930701159882 ], [ -1.602712160582625, 1.5972352709973834, 1.098561760285905 ], [ 0.45299285232262587, 0.6185281242834944, 0.2568300925799102 ], ... ],
    	...
    ]

To produce the following output:

TrainingTester.java:633 executed in 0.20 seconds (0.000 gc):

    Result[] array = ConstantResult.batchResultArray(pop(RefUtil.addRef(data)));
    @Nullable
    Result eval = layer.eval(array);
    assert eval != null;
    TensorList tensorList = Result.getData(eval);
    String temp_18_0016 = tensorList.stream().limit(1).map(x -> {
      String temp_18_0017 = x.prettyPrint();
      x.freeRef();
      return temp_18_0017;
    }).reduce((a, b) -> a + "\n" + b).orElse("");
    tensorList.freeRef();
    return temp_18_0016;

Returns

    [
    	[ [ -1.742049217224121, -0.7589096426963806, -0.023401055485010147 ], [ -0.8407581448554993, 0.2246779203414917, -2.1292858123779297 ], [ 1.1336147785186768, 1.8329511880874634, 1.7814077138900757 ], [ 1.2910969257354736, 0.3183075189590454, 0.7238197922706604 ], [ -1.6541409492492676, -0.2861637473106384, -0.9690592885017395 ], [ 0.03186923265457153, -0.14045970141887665, 1.4580785036087036 ], [ 0.18533287942409515, 0.3316853642463684, -0.06950917840003967 ], [ 1.3657612800598145, 0.8626564741134644, -1.3552052974700928 ], ... ],
    	[ [ -0.10434272140264511, 0.37360841035842896, -1.0389589071273804 ], [ 0.9668379426002502, 1.839725136756897, 0.007117905654013157 ], [ 0.031216179952025414, 1.8349345922470093, -0.862476110458374 ], [ -1.3231182098388672, 0.10343251377344131, -1.749462604522705 ], [ 0.13343247771263123, 1.34597647190094, -0.8132460117340088 ], [ 0.08276671916246414, -0.6398640275001526, -0.28572264313697815 ], [ -1.1436647176742554, -0.2809623181819916, 0.20043811202049255 ], [ 0.3313150703907013, -1.5724024772644043, 0.04640492796897888 ], ... ],
    	[ [ -0.931609034538269, -0.4636566638946533, 1.0349068641662598 ], [ -2.1620981693267822, 1.865268588066101, -0.15355676412582397 ], [ -1.6264656782150269, 0.9300855398178101, 0.1775273233652115 ], [ -0.3458615243434906, -1.660702109336853, -0.1561308652162552 ], [ -0.03487519174814224, 0.33535534143447876, 1.52305269241333 ], [ 0.22334466874599457, 1.1266887187957764, -0.06962288916110992 ], [ 2.010618209838867, -1.248648762702942, 1.177006721496582 ], [ -0.8520990014076233, 0.009430055506527424, 0.2867738902568817 ], ... ],
    	[ [ 0.539771318435669, 0.32288119196891785, -0.09196598827838898 ], [ -0.4671162962913513, -0.7015018463134766, 0.8884945511817932 ], [ 0.7773880958557129, 0.23691588640213013, 0.14047934114933014 ], [ -0.8334760665893555, 0.39486175775527954, -1.3788607120513916 ], [ -1.6244864463806152, 0.6023074388504028, -0.007760372944176197 ], [ 1.333664059638977, 0.745838463306427, -0.17416992783546448 ], [ -0.022828664630651474, -0.06360861659049988, -0.18917007744312286 ], [ 1.279162883758545, 0.11537640541791916, 1.9943299293518066 ], ... ],
    	[ [ -0.18504108488559723, 0.2614108920097351, 1.8900642395019531 ], [ -2.0760416984558105, -1.120998740196228, -1.3879860639572144 ], [ -0.0025474524591118097, 1.3697454929351807, 0.4565381705760956 ], [ -0.634149968624115, -0.9437309503555298, -0.17542949318885803 ], [ -0.13282746076583862, 0.7356611490249634, 0.08412784337997437 ], [ -0.39803752303123474, -1.0654767751693726, -1.0102325677871704 ], [ 0.06657543778419495, 0.17193874716758728, -0.563054621219635 ], [ 0.653261125087738, 1.6754300594329834, 1.1027677059173584 ], ... ],
    	[ [ 1.5047367811203003, -0.2529701590538025, -0.7200071215629578 ], [ -1.807608723640442, 0.559048056602478, -0.0054334606975317 ], [ -0.3648788630962372, -1.4211848974227905, 0.26337265968322754 ], [ -1.6105231046676636, 1.510939359664917, 0.3091653883457184 ], [ -0.0743689090013504, -0.07823678106069565, 1.9564504623413086 ], [ -0.9797886610031128, -0.03267301246523857, -0.15015867352485657 ], [ 0.19408459961414337, -0.026312561705708504, 1.2098561525344849 ], [ -0.12827110290527344, 0.3467918336391449, 0.09836817532777786 ], ... ],
    	[ [ 0.4383828043937683, 1.5329697132110596, 1.0254442691802979 ], [ 0.027303827926516533, 0.5772092938423157, 0.024929296225309372 ], [ 0.8511155247688293, 0.7086995840072632, 0.3019624948501587 ], [ 1.247290849685669, -2.1153030395507812, 0.4589805006980896 ], [ -0.7016096711158752, -1.7343964576721191, 0.3778062164783478 ], [ 0.23124966025352478, -2.422400712966919, -1.0074270963668823 ], [ -0.12015113234519958, 0.14185835421085358, 0.7067322134971619 ], [ 0.020036159083247185, 1.1319100856781006, -1.3986719846725464 ], ... ],
    	[ [ -1.2210665941238403, -0.2252630591392517, -0.03892482817173004 ], [ 0.40052059292793274, 0.18521179258823395, -1.0839232206344604 ], [ -1.9473401308059692, -0.7616400718688965, 0.35309526324272156 ], [ -0.6943474411964417, 0.698397696018219, -2.2494077682495117 ], [ -1.8254374265670776, 0.2825780212879181, -0.5278592705726624 ], [ 0.4853794276714325, 1.8696305751800537, 0.5958019495010376 ], [ -0.7575552463531494, -2.152587413787842, -0.6198601126670837 ], [ 0.5451574921607971, 0.12275219708681107, -0.2315288633108139 ], ... ],
    	...
    ]

Conjugate Gradient Descent

First, we use a conjugate gradient descent method, which converges the fastest for purely linear functions.

TrainingTester.java:452 executed in 87.49 seconds (4.722 gc):

    IterativeTrainer iterativeTrainer = new IterativeTrainer(trainable.addRef());
    try {
      iterativeTrainer.setLineSearchFactory(label -> new QuadraticSearch());
      iterativeTrainer.setOrientation(new GradientDescent());
      iterativeTrainer.setMonitor(TrainingTester.getMonitor(history));
      iterativeTrainer.setTimeout(30, TimeUnit.SECONDS);
      iterativeTrainer.setMaxIterations(250);
      iterativeTrainer.setTerminateThreshold(0);
      return iterativeTrainer.run();
    } finally {
      iterativeTrainer.freeRef();
    }
Logging
Reset training subject: 3191740389497
Reset training subject: 3193218000547
Constructing line search parameters: GD
F(0.0) = LineSearchPoint{point=PointSample{avg=3.555083177749154}, derivative=-2.4578176780647247E-6}
F(1.0E-10) = LineSearchPoint{point=PointSample{avg=3.555083177749154}, derivative=-2.4578176780647247E-6}, evalInputDelta = 0.0
F(7.000000000000001E-10) = LineSearchPoint{point=PointSample{avg=3.555083177749154}, derivative=-2.4578176780647247E-6}, evalInputDelta = 0.0
F(4.900000000000001E-9) = LineSearchPoint{point=PointSample{avg=3.555083177749154}, derivative=-2.4578176780647247E-6}, evalInputDelta = 0.0
F(3.430000000000001E-8) = LineSearchPoint{point=PointSample{avg=3.555083177749154}, derivative=-2.4578176780647247E-6}, evalInputDelta = 0.0
F(2.4010000000000004E-7) = LineSearchPoint{point=PointSample{avg=3.555083177749154}, derivative=-2.4578176780647247E-6}, evalInputDelta = 0.0
New Minimum: 3.555083177749154 > 3.5550831777491525
F(1.6807000000000003E-6) = LineSearchPoint{point=PointSample{avg=3.5550831777491525}, derivative=-2.4578176780647247E-6}, evalInputDelta = -1.3322676295501878E-15
New Minimum: 3.5550831777491525 > 3.555083177749143
F(1.1764900000000001E-5) = LineSearchPoint{point=PointSample{avg=3.555083177749143}, derivative=-2.4578176780647243E-6}, evalInputDelta = -1.0658141036401503E-14
New Minimum: 3.555083177749143 > 3.555083177742594
F(8.235430000000001E-5) = LineSearchPoint{point=PointSample{avg=3.555083177742594}, derivative=-2.4578176780626105E-6}, evalInputDelta = -6.559641718695275E-12
New Minimum: 3.555083177742594 > 3.555083177263448
F(5.764801000000001E-4) = LineSearchPoint{point=PointSample{avg=3.555083177263448}, derivative=-2.4578176777982625E-6}, evalInputDelta = -4.857056978835317E-10
New Minimum: 3.555083177263448 > 3.55508316779373
F(0.004035360700000001) = LineSearchPoint{point=PointSample{avg=3.55508316779373}, derivative=-2.457817671253501E-6}, evalInputDelta = -9.95542404069738E-9
New Minimum: 3.55508316779373 > 3.555083107699821
F(0.028247524900000005) = LineSearchPoint{point=PointSample{avg=3.555083107699821}, derivative=-2.457817627851271E-6}, evalInputDelta = -7.004933300081007E-8
New Minimum: 3.555083107699821 > 3.5550826913228795
F(0.19773267430000002) = LineSearchPoint{point=PointSample{avg=3.5550826913228795}, derivative=-2.457817331336786E-6}, evalInputDelta = -4.864262743708991E-7
New Minimum: 3.5550826913228795 > 3.555079775248096
F(1.3841287201) = LineSearchPoint{point=PointSample{avg=3.555079775248096}, derivative=-2.457815256946482E-6}, evalInputDelta = -3.402501057792051E-6
New Minimum: 3.555079775248096 > 3.555059364108255
F(9.688901040700001) = LineSearchPoint{point=PointSample{avg=3.555059364108255}, derivative=-2.457800736030193E-6}, evalInputDelta = -2.3813640898762145E-5
New Minimum: 3.555059364108255 > 3.554916486675965
F(67.8223072849) = LineSearchPoint{point=PointSample{avg=3.554916486675965}, derivative=-2.4576990888182587E-6}, evalInputDelta = -1.66691073189007E-4
New Minimum: 3.554916486675965 > 3.5539165105142425
F(474.7561509943) = LineSearchPoint{point=PointSample{avg=3.5539165105142425}, derivative=-2.456987653362808E-6}, evalInputDelta = -0.0011666672349113227
New Minimum: 3.5539165105142425 > 3.54692477874178
F(3323.2930569601003) = LineSearchPoint{point=PointSample{avg=3.54692477874178}, derivative=-2.4520121947792385E-6}, evalInputDelta = -0.008158399007373696
New Minimum: 3.54692477874178 > 3.498377896458339
F(23263.0513987207) = LineSearchPoint{point=PointSample{avg=3.498377896458339}, derivative=-2.417408246096232E-6}, evalInputDelta = -0.056705281290814646
New Minimum: 3.498377896458339 > 3.1773303412081937
F(162841.3597910449) = LineSearchPoint{point=PointSample{avg=3.1773303412081937}, derivative=-2.185945321290458E-6}, evalInputDelta = -0.37775283654096015
New Minimum: 3.1773303412081937 > 1.6711035662457696
F(1139889.5185373144) = LineSearchPoint{point=PointSample{avg=1.6711035662457696}, derivative=-1.0163113040866022E-6}, evalInputDelta = -1.8839796115033842
F(7979226.6297612) = LineSearchPoint{point=PointSample{avg=18.565514356452958}, derivative=1.1455975358734525E-5}, evalInputDelta = 15.010431178703804
F(613786.6638277846) = LineSearchPoint{point=PointSample{avg=2.339905953439792}, derivative=-1.5568117260927205E-6}, evalInputDelta = -1.215177224309362
New Minimum: 1.6711035662457696 > 1.3226079681014262
F(4296506.646794492) = LineSearchPoint{point=PointSample{avg=1.3226079681014262}, derivative=8.296162576451246E-7}, evalInputDelta = -2.2324752096477276
1.3226079681014262 <= 3.555083177749154
New Minimum: 1.3226079681014262 > 0.8597610903899167
F(3212240.9748544157) = LineSearchPoint{point=PointSample{avg=0.8597610903899167}, derivative=1.0367121715725015E-7}, evalInputDelta = -2.695322087359237
Right bracket at 3212240.9748544157
New Minimum: 0.8597610903899167 > 0.8503189361553563
F(3082231.849190535) = LineSearchPoint{point=PointSample{avg=0.8503189361553563}, derivative=4.208987910076242E-8}, evalInputDelta = -2.7047642415937974
Right bracket at 3082231.849190535
New Minimum: 0.8503189361553563 > 0.8487539337439773
F(3030337.6239336445) = LineSearchPoint{point=PointSample{avg=0.8487539337439773}, derivative=1.8290866057805067E-8}, evalInputDelta = -2.7063292440051763
Right bracket at 3030337.6239336445
Converged to right
Fitness changed from 3.555083177749154 to 0.8487539337439773
Iteration 1 complete. Error: 0.8487539337439773 Total: 87.4922; Orientation: 1.4796; Line Search: 81.4766
Final threshold in iteration 1: 0.8487539337439773 (> 0.0) after 87.493s (< 30.000s)

Returns

    0.8487539337439773

This training run resulted in the following configuration:

TrainingTester.java:610 executed in 0.00 seconds (0.000 gc):

    RefList<double[]> state = network.state();
    assert state != null;
    String description = state.stream().map(RefArrays::toString).reduce((a, b) -> a + "\n" + b)
        .orElse("");
    state.freeRef();
    return description;

Returns

    

And regressed input:

TrainingTester.java:622 executed in 0.00 seconds (0.000 gc):

    return RefArrays.stream(RefUtil.addRef(data)).flatMap(x -> {
      return RefArrays.stream(x);
    }).limit(1).map(x -> {
      String temp_18_0015 = x.prettyPrint();
      x.freeRef();
      return temp_18_0015;
    }).reduce((a, b) -> a + "\n" + b).orElse("");

Returns

    [
    	[ [ 0.07335271489199635, -0.09716735206244942, 0.13837843280199405 ], [ -0.07093884795910965, -0.29701400883581075, -1.4860980501419327 ], [ -0.5335619241803422, -0.09244830316254715, 0.139563964449545 ], [ 0.09320039971836147, -0.9552663203073796, -0.968902992415911 ], [ 0.8555070696787495, -0.4757149164722325, 0.6265107417932101 ], [ -0.20028941514222176, 0.15107167584460873, -0.1802813864520567 ], [ -1.4402431515156808, -1.8001057348948435, 0.8015046841805555 ], [ -2.117580056891961, 1.5119857562514971, 0.7663988390600136 ], ... ],
    	[ [ 0.38937498394486747, -0.031294475833110125, 1.191649775634831 ], [ -1.3448877813003735, 0.562671156468818, -0.9978335805878085 ], [ -0.3938230707360849, -0.599876370400558, 0.5623827109631326 ], [ -0.6814060622306035, -0.3967911934060275, -0.14291062112444997 ], [ 0.3623586116876074, 0.08612475324689584, -0.7223026622282369 ], [ -0.9028535816227119, -0.5579812504644968, 1.627735572684249 ], [ 0.19860193466450238, 0.1748696822011433, -0.6878433678583351 ], [ 1.440142432845558, -0.16050535189395165, 0.31155916248326143 ], ... ],
    	[ [ -1.1751496790450164, 0.980323168111473, 1.5331425915318166 ], [ 0.49539058841868266, 0.07481493791657257, -0.6113675757340788 ], [ 0.011825316684591858, 0.289423526164179, -0.5349085672245395 ], [ -1.1497151664505916, 1.5427161628683872, -0.5709823165989396 ], [ -0.12039812158679808, 1.5825482547831504, 0.08402933763460085 ], [ -0.10084314503150948, -0.9016476419481014, -0.6163278027376798 ], [ 0.17994382564140343, -1.1010264748846845, 0.7762938103311698 ], [ -1.042648114443812, -0.27319927740930744, -1.0336948040873302 ], ... ],
    	[ [ 0.4596931472437609, 1.8466172639829637, -0.6761950570316865 ], [ -0.1485844598141554, 1.0475185112455723, -0.2417503434841849 ], [ -0.6001714678122245, -0.7040958795019667, -1.0724406140035032 ], [ 0.3818246261393908, 0.9226314126950887, -0.2513413178939241 ], [ -1.1134947102813042, 0.5147607740185279, -0.3570074189239349 ], [ -0.3700344223054425, 0.4127227355272418, 0.12064665956570753 ], [ 0.6116050990427094, -0.062187703579004994, -0.014465429085839282 ], [ 1.702477159634372, 0.10552838757170226, 1.0630421522492939 ], ... ],
    	[ [ -0.9139157233182958, 0.3481617001319698, 1.7287295753634369 ], [ -0.44578472646359324, 1.2457146390155995, -0.08358062486438977 ], [ -0.07296842978966088, 0.545378482250546, -0.12086220606722248 ], [ 0.8699990856557911, 0.7781651339452622, -1.360925841712346 ], [ -1.6219518862272821, -0.8297171937768739, -0.6102324808443848 ], [ 0.6598510506185882, 0.49867541937709536, -0.10464112725726005 ], [ -0.24530416404394564, 0.3293091062052075, 1.7084018413160074 ], [ -0.7163055627501562, 0.8582150753800387, -1.8325604324560154 ], ... ],
    	[ [ 0.16441725366174453, -0.6259173587173911, 0.27527937879935893 ], [ -1.0734318479309235, -0.08636771370056101, -0.013214327570349448 ], [ -1.110601448065966, 0.6598394701105346, 1.1709996438715036 ], [ 1.3274872266720759, -0.031697562530782486, 0.7438594133750918 ], [ -0.7613385185187586, -0.4698650128898616, -0.8391188005981302 ], [ 0.9012569300560198, 0.39175234479498633, -1.7714340223257528 ], [ -0.10826460620464355, 0.4022482413340568, -0.37251454007119134 ], [ -0.8395570431943579, -0.12336621821744553, 0.0017308551551220158 ], ... ],
    	[ [ -1.1652749163299658, -0.8836103146266859, -1.4891822632875442 ], [ -0.2862387142073561, 0.4176671556763011, -0.3475405689054939 ], [ -0.4152832738833432, -0.08673618360531277, -0.8330846052658424 ], [ -0.8999605776222142, 0.2970381935034465, 0.919906820231508 ], [ 1.034725793443196, -1.4226428019401198, 1.0528819605975372 ], [ -0.7260301006580143, 0.8438169453894735, -0.17357321674885862 ], [ -1.8979097861348813, -0.26684964372554426, -0.17510952456340456 ], [ 0.18390190649015153, -1.531150331735015, 0.6067183948000985 ], ... ],
    	[ [ -0.2735969500254588, 0.09060425021058466, -0.6803066260740653 ], [ 1.571812373903893, -0.3859434465353857, 0.42925270034486995 ], [ 1.9561703610278216, 0.005819070897182033, -0.3242375221057014 ], [ 0.04712417596203533, -1.6570901559318363, 0.750846358214883 ], [ 1.3648177890422863, -0.6610757955040032, 0.1257160583767667 ], [ -0.14955364044085973, -0.7525374494982024, -0.47762504223061525 ], [ -0.9087448973336262, 1.7963626354363602, 1.0369033005011505 ], [ -0.2344149199510095, 0.56827731262184, 0.31673946213540966 ], ... ],
    	...
    ]

To produce the following output:

TrainingTester.java:633 executed in 0.20 seconds (0.000 gc):

    Result[] array = ConstantResult.batchResultArray(pop(RefUtil.addRef(data)));
    @Nullable
    Result eval = layer.eval(array);
    assert eval != null;
    TensorList tensorList = Result.getData(eval);
    String temp_18_0016 = tensorList.stream().limit(1).map(x -> {
      String temp_18_0017 = x.prettyPrint();
      x.freeRef();
      return temp_18_0017;
    }).reduce((a, b) -> a + "\n" + b).orElse("");
    tensorList.freeRef();
    return temp_18_0016;

Returns

    [
    	[ [ -0.0538499541580677, -0.11471050977706909, -0.02306058444082737 ], [ 0.12239726632833481, -0.3217439651489258, -3.0275213718414307 ], [ 0.48462724685668945, 0.05757341906428337, 0.175058051943779 ], [ 0.08979812264442444, 0.5366676449775696, 0.5753070712089539 ], [ -0.5424664616584778, -0.2624627351760864, 0.10543768107891083 ], [ 0.1013258695602417, -0.13704517483711243, -0.03545386716723442 ], [ -0.05393911898136139, 0.3330814242362976, 0.26676127314567566 ], [ 3.0704269409179688, -0.4714067876338959, 0.2765446901321411 ], ... ],
    	[ [ 0.0742676630616188, -0.05657273158431053, -0.29348570108413696 ], [ 0.43236610293388367, 0.10417015105485916, 0.16513627767562866 ], [ -0.7040518522262573, 0.5355246663093567, -0.6425060033798218 ], [ -0.9639641046524048, 0.13441909849643707, -0.031255368143320084 ], [ 0.04154719039797783, -0.08350443094968796, -1.4130789041519165 ], [ 0.10344363003969193, -0.8116368055343628, -1.8515336513519287 ], [ -0.3071041405200958, -0.14237114787101746, 0.217220276594162 ], [ -0.03490573167800903, 0.05885091796517372, 0.18628007173538208 ], ... ],
    	[ [ -2.019498348236084, -0.9834486842155457, 0.2735787332057953 ], [ -0.1572641283273697, -0.038746610283851624, -0.48965761065483093 ], [ 0.012334435246884823, -0.31327152252197266, 0.21045254170894623 ], [ -0.207158163189888, -2.5035808086395264, -0.1418635994195938 ], [ 0.21585969626903534, 0.25637853145599365, 0.09532852470874786 ], [ -0.12976615130901337, 1.8402059078216553, 0.8021523952484131 ], [ 0.15170225501060486, 0.21913577616214752, 1.5280020236968994 ], [ -0.5018520355224609, 0.011527332477271557, -1.054724097251892 ], ... ],
    	[ [ 0.6223424077033997, -0.03969878330826759, 0.002165217651054263 ], [ -0.10885391384363174, -0.6019600033760071, 0.28343823552131653 ], [ 0.4923279583454132, 0.07007095217704773, 1.3672459125518799 ], [ -0.4420938491821289, 0.8735848069190979, -0.22507764399051666 ], [ -1.8515197038650513, 0.49504101276397705, -0.01371446531265974 ], [ 0.4361816346645355, 0.4314355254173279, 0.027841724455356598 ], [ -0.2635622024536133, -0.052381426095962524, 0.01635429635643959 ], [ 2.0469768047332764, 0.0644676685333252, 0.5328778028488159 ], ... ],
    	[ [ -0.46365296840667725, 0.12121656537055969, 1.5512741804122925 ], [ -0.2948160469532013, -2.6268093585968018, 0.0702323466539383 ], [ -0.046487804502248764, 0.32852834463119507, 0.10564108937978745 ], [ -0.9849780201911926, -0.3795618712902069, -0.015404892154037952 ], [ -0.6804324388504028, 0.025525599718093872, 0.17004406452178955 ], [ -1.0025345087051392, -0.31568169593811035, 0.10691433399915695 ], [ 0.05804039537906647, 0.23952896893024445, -3.160215377807617 ], [ 0.2529211938381195, 0.9004620313644409, 2.4844322204589844 ], ... ],
    	[ [ 0.18061716854572296, -0.18202237784862518, -0.05424771085381508 ], [ -1.435882568359375, -0.159909188747406, -0.011259178631007671 ], [ -0.2554713189601898, -1.1984202861785889, 0.1722104847431183 ], [ -1.4636174440383911, 0.007596331182867289, -0.882543683052063 ], [ 0.0560777522623539, 0.02392914704978466, 1.061417579650879 ], [ -0.9210255742073059, 0.0758354663848877, -0.68281489610672 ], [ 0.1892206072807312, -0.3412841260433197, -0.2635633051395416 ], [ 0.22828471660614014, -0.06051357090473175, -0.002502922434359789 ], ... ],
    	[ [ 1.3950875997543335, 0.22538885474205017, -0.4057798385620117 ], [ -0.5086396932601929, -0.0663561075925827, -0.356827050447464 ], [ 0.09082711488008499, -0.13142383098602295, 0.18082380294799805 ], [ 0.2142246514558792, -0.12888292968273163, 0.6053487658500671 ], [ -0.4265356659889221, -1.2950292825698853, 0.738592803478241 ], [ 0.4755764603614807, -1.1736533641815186, 0.020374752581119537 ], [ -0.24772250652313232, 0.363915354013443, 0.22020167112350464 ], [ 0.2480110079050064, 1.4968611001968384, -1.055946946144104 ], ... ],
    	[ [ -0.3541810214519501, 0.11355371028184891, 0.14125172793865204 ], [ 0.19109760224819183, 0.5818293690681458, -0.40641555190086365 ], [ -2.2019052505493164, 0.010163696482777596, 0.349806010723114 ], [ 0.01146409846842289, -0.10961627215147018, -0.34343376755714417 ], [ -0.7789419293403625, -0.06893695145845413, 0.11776620149612427 ], [ -0.2756640613079071, 0.25970882177352905, 0.6703637838363647 ], [ 0.8023470044136047, -2.853365659713745, -0.4665166437625885 ], [ -0.19814147055149078, 0.03834560513496399, -0.2905344069004059 ], ... ],
    	...
    ]

Limited-Memory BFGS

Next, we apply the same optimization using L-BFGS, which is nearly ideal for purely second-order or quadratic functions.

TrainingTester.java:509 executed in 47.12 seconds (6.407 gc):

    IterativeTrainer iterativeTrainer = new IterativeTrainer(trainable.addRef());
    try {
      iterativeTrainer.setLineSearchFactory(label -> new ArmijoWolfeSearch());
      iterativeTrainer.setOrientation(new LBFGS());
      iterativeTrainer.setMonitor(TrainingTester.getMonitor(history));
      iterativeTrainer.setTimeout(30, TimeUnit.SECONDS);
      iterativeTrainer.setIterationsPerSample(100);
      iterativeTrainer.setMaxIterations(250);
      iterativeTrainer.setTerminateThreshold(0);
      return iterativeTrainer.run();
    } finally {
      iterativeTrainer.freeRef();
    }
Logging
Reset training subject: 3279786604577
Reset training subject: 3281280470600
Adding measurement 3d5a9d54 to history. Total: 0
LBFGS Accumulation History: 1 points
Constructing line search parameters: GD
Non-optimal measurement 3.555083177749154 < 3.555083177749154. Total: 1
th(0)=3.555083177749154;dx=-2.4578176780647247E-6
Adding measurement 1054c5d9 to history. Total: 1
New Minimum: 3.555083177749154 > 3.555077882161754
WOLFE (weak): th(2.154434690031884)=3.555077882161754; dx=-2.4578139101780364E-6 evalInputDelta=5.2955873997007075E-6
Adding measurement 520dfeea to history. Total: 2
New Minimum: 3.555077882161754 > 3.5550725870959594
WOLFE (weak): th(4.308869380063768)=3.5550725870959594; dx=-2.4578101431147933E-6 evalInputDelta=1.0590653194419986E-5
Adding measurement 78dbd9f3 to history. Total: 3
New Minimum: 3.5550725870959594 > 3.5550514064719607
WOLFE (weak): th(12.926608140191302)=3.5550514064719607; dx=-2.457795074683336E-6 evalInputDelta=3.1771277193115566E-5
Adding measurement 1898ba0 to history. Total: 4
New Minimum: 3.5550514064719607 > 3.554956094864879
WOLFE (weak): th(51.70643256076521)=3.554956094864879; dx=-2.4577272673882216E-6 evalInputDelta=1.2708288427498715E-4
Adding measurement 4ce108cf to history. Total: 5
New Minimum: 3.554956094864879 > 3.554447811079023
WOLFE (weak): th(258.53216280382605)=3.554447811079023; dx=-2.457365653670841E-6 evalInputDelta=6.353666701306437E-4
Adding measurement 9c60c61 to history. Total: 6
New Minimum: 3.554447811079023 > 3.551272731143033
WOLFE (weak): th(1551.1929768229563)=3.551272731143033; dx=-2.4551065276960744E-6 evalInputDelta=0.003810446606120621
Adding measurement 6c9ac925 to history. Total: 7
New Minimum: 3.551272731143033 > 3.528498200285031
WOLFE (weak): th(10858.350837760694)=3.528498200285031; dx=-2.4388895985106823E-6 evalInputDelta=0.026584977464122694
Adding measurement 798e82ae to history. Total: 8
New Minimum: 3.528498200285031 > 3.348070231870996
WOLFE (weak): th(86866.80670208555)=3.348070231870996; dx=-2.3096227808675794E-6 evalInputDelta=0.20701294587815777
Adding measurement 296b8deb to history. Total: 9
New Minimum: 3.348070231870996 > 2.0948370301768464
END: th(781801.26031877)=2.0948370301768464; dx=-1.3638607793296401E-6 evalInputDelta=1.4602461475723074
Fitness changed from 3.555083177749154 to 2.0948370301768464
Iteration 1 complete. Error: 2.0948370301768464 Total: 47.1189; Orientation: 1.6977; Line Search: 40.9269
Final threshold in iteration 1: 2.0948370301768464 (> 0.0) after 47.119s (< 30.000s)

Returns

    2.0948370301768464

This training run resulted in the following configuration:

TrainingTester.java:610 executed in 0.00 seconds (0.000 gc):

    RefList<double[]> state = network.state();
    assert state != null;
    String description = state.stream().map(RefArrays::toString).reduce((a, b) -> a + "\n" + b)
        .orElse("");
    state.freeRef();
    return description;

Returns

    

And regressed input:

TrainingTester.java:622 executed in 0.00 seconds (0.000 gc):

    return RefArrays.stream(RefUtil.addRef(data)).flatMap(x -> {
      return RefArrays.stream(x);
    }).limit(1).map(x -> {
      String temp_18_0015 = x.prettyPrint();
      x.freeRef();
      return temp_18_0015;
    }).reduce((a, b) -> a + "\n" + b).orElse("");

Returns

    [
    	[ [ 1.114128964040427, -0.5444743185721016, 0.13958164949374618 ], [ 0.4447117045678727, 0.18455992135499263, -1.1550894920329002 ], [ -0.9271515007035558, -1.187320247637766, 0.9887452640652843 ], [ 0.8669665974770782, -0.887545932201556, -1.0394657831541307 ], [ 1.3723107645736736, -0.4996707238189714, 1.2835513239854226 ], [ -0.06354509724714189, 0.15472853440306006, -1.3316698327768772 ], [ -1.4608389698375097, -1.8000272787010252, 0.8983333215521693 ], [ -1.9412936282867492, 1.817703839305624, -0.870767918558252 ], ... ],
    	[ [ 0.5723728493963148, 0.20265671447596129, 1.456064681531451 ], [ -1.5015348482147632, 1.4896838142683944, -1.005377150504087 ], [ 0.01711837147273479, -1.2499675762838705, 0.7030803567310698 ], [ -0.8673489014461553, -0.3576196776268742, -1.2982842211312189 ], [ 0.47339379815156835, -0.8503825787018818, -0.426758689612328 ], [ -0.9007361997196084, -0.4496619175020851, 1.7229088717582068 ], [ 0.653748530999037, 0.3211420593913464, -0.6790554878918231 ], [ 1.4904933264516436, -1.4274809445212848, 0.07741157242218612 ], ... ],
    	[ [ -0.594045937475725, 0.6476635534903448, 1.7133448600186956 ], [ 1.5316866839114827, -1.185720235718325, -0.19928009764320245 ], [ -0.9585922231287332, -0.599074943433102, -0.5060380784811219 ], [ -1.1810908736042474, 1.2349932327818296, -0.5806415487142423 ], [ 0.01939489153454381, 1.5895615303508153, 0.9120890875573453 ], [ 0.1669055463209996, -0.5739413246777545, 0.048755071941641775 ], [ 1.2069253120446293, -1.6582552357980958, 0.6128339241136334 ], [ -1.1920529309169692, -0.27230940334260556, 0.19336084540315035 ], ... ],
    	[ [ 0.40352818637939414, 1.8713875388389465, -0.7086986983396377 ], [ -0.49541077735902017, 1.0894282576488716, -0.6411361795112558 ], [ -0.7929664548508318, -0.79306575960962, -1.3214285404992885 ], [ 0.6416577402944803, 0.5852910267880125, -0.96415825376191 ], [ -1.014440496636258, 0.5958172114308171, -0.3562599054897978 ], [ -0.8819947658776174, 0.6347890183913449, 0.5386596431771185 ], [ 0.6712588012651791, -0.07540457944978796, 0.16247795323883432 ], [ 1.5255250731426337, 0.18156319800453563, 1.6365834680690292 ], ... ],
    	[ [ -0.876877830869984, 0.5112835937006717, 1.8142612136197012 ], [ -1.3675190546532798, 0.5617945152861406, 0.9044635562344749 ], [ -0.003985061930183623, 1.1498345965712573, -0.4407701264685614 ], [ 0.6370090859070607, 1.0911702025136407, -1.3750790298509872 ], [ -1.629407672350007, -1.1935112177631544, -0.5877997221959782 ], [ 0.2681810688600801, 0.9834478306000787, 0.6823636356930887 ], [ -0.26214466790291224, 0.24523278295368064, 1.9010258892498832 ], [ -0.9624256643106209, 1.2186716328529932, -1.6451584374101709 ], ... ],
    	[ [ 0.9328284842397648, -0.6749512421877027, 0.875356977550339 ], [ -1.229675212764938, 0.2893614488269637, -0.006377217921393874 ], [ -1.1383514925844562, 0.763839778759606, 1.1865820872371675 ], [ 1.3812923229645313, 1.294789276674353, -0.20580718765302886 ], [ -0.7929937016503773, -0.542682010348246, -1.234521195657477 ], [ 0.9329726525228831, 0.47504106235093624, -1.7748220167046338 ], [ -0.11103630026108915, 0.02957576243869034, 0.8032087634601474 ], [ -0.9526710173757258, 0.4163457196239635, -0.06781823882848143 ], ... ],
    	[ [ -1.0545116552027147, -1.4834423561711387, -1.8444691079078996 ], [ 0.015193876666020609, 0.9714524918605779, 0.023122793953330723 ], [ 0.8663756478538518, 0.4198598878909776, -0.8857044330172031 ], [ -1.3837793904658202, 1.4092806390175463, 0.8487431416017691 ], [ 1.1484565620299036, -1.5661154921019353, 0.957250446561368 ], [ -0.573154008677406, 1.351486607512428, 0.9168626445083783 ], [ -1.896492708533662, -0.10446134144711092, -0.5052220519413109 ], [ 0.01479674803092252, -1.4338356669672212, 0.7709112332354318 ], ... ],
    	[ [ -0.7740097652919271, -0.17251512797139795, -0.7305356869041453 ], [ 1.5956958670050962, -0.12331439031659296, 0.8586881329978256 ], [ 1.902619337582579, -0.4080874375142525, -0.3270293127945705 ], [ -0.7446910644231184, -1.7928108203231907, 1.6064961960118127 ], [ 1.70553490607535, -0.8739760025024054, -0.43651571972572445 ], [ 0.25228377983926153, -1.5178914596231883, -0.428930701159882 ], [ -1.602712160582625, 1.5972352709973834, 1.098561760285905 ], [ 0.45299285232262587, 0.6185281242834944, 0.2568300925799102 ], ... ],
    	...
    ]

To produce the following output:

TrainingTester.java:633 executed in 0.20 seconds (0.000 gc):

    Result[] array = ConstantResult.batchResultArray(pop(RefUtil.addRef(data)));
    @Nullable
    Result eval = layer.eval(array);
    assert eval != null;
    TensorList tensorList = Result.getData(eval);
    String temp_18_0016 = tensorList.stream().limit(1).map(x -> {
      String temp_18_0017 = x.prettyPrint();
      x.freeRef();
      return temp_18_0017;
    }).reduce((a, b) -> a + "\n" + b).orElse("");
    tensorList.freeRef();
    return temp_18_0016;

Returns

    [
    	[ [ -1.742049217224121, -0.7589096426963806, -0.023401055485010147 ], [ -0.8407581448554993, 0.2246779203414917, -2.1292858123779297 ], [ 1.1336147785186768, 1.8329511880874634, 1.7814077138900757 ], [ 1.2910969257354736, 0.3183075189590454, 0.7238197922706604 ], [ -1.6541409492492676, -0.2861637473106384, -0.9690592885017395 ], [ 0.03186923265457153, -0.14045970141887665, 1.4580785036087036 ], [ 0.18533287942409515, 0.3316853642463684, -0.06950917840003967 ], [ 1.3657612800598145, 0.8626564741134644, -1.3552052974700928 ], ... ],
    	[ [ -0.10434272140264511, 0.37360841035842896, -1.0389589071273804 ], [ 0.9668379426002502, 1.839725136756897, 0.007117905654013157 ], [ 0.031216179952025414, 1.8349345922470093, -0.862476110458374 ], [ -1.3231182098388672, 0.10343251377344131, -1.749462604522705 ], [ 0.13343247771263123, 1.34597647190094, -0.8132460117340088 ], [ 0.08276671916246414, -0.6398640275001526, -0.28572264313697815 ], [ -1.1436647176742554, -0.2809623181819916, 0.20043811202049255 ], [ 0.3313150703907013, -1.5724024772644043, 0.04640492796897888 ], ... ],
    	[ [ -0.931609034538269, -0.4636566638946533, 1.0349068641662598 ], [ -2.1620981693267822, 1.865268588066101, -0.15355676412582397 ], [ -1.6264656782150269, 0.9300855398178101, 0.1775273233652115 ], [ -0.3458615243434906, -1.660702109336853, -0.1561308652162552 ], [ -0.03487519174814224, 0.33535534143447876, 1.52305269241333 ], [ 0.22334466874599457, 1.1266887187957764, -0.06962288916110992 ], [ 2.010618209838867, -1.248648762702942, 1.177006721496582 ], [ -0.8520990014076233, 0.009430055506527424, 0.2867738902568817 ], ... ],
    	[ [ 0.539771318435669, 0.32288119196891785, -0.09196598827838898 ], [ -0.4671162962913513, -0.7015018463134766, 0.8884945511817932 ], [ 0.7773880958557129, 0.23691588640213013, 0.14047934114933014 ], [ -0.8334760665893555, 0.39486175775527954, -1.3788607120513916 ], [ -1.6244864463806152, 0.6023074388504028, -0.007760372944176197 ], [ 1.333664059638977, 0.745838463306427, -0.17416992783546448 ], [ -0.022828664630651474, -0.06360861659049988, -0.18917007744312286 ], [ 1.279162883758545, 0.11537640541791916, 1.9943299293518066 ], ... ],
    	[ [ -0.18504108488559723, 0.2614108920097351, 1.8900642395019531 ], [ -2.0760416984558105, -1.120998740196228, -1.3879860639572144 ], [ -0.0025474524591118097, 1.3697454929351807, 0.4565381705760956 ], [ -0.634149968624115, -0.9437309503555298, -0.17542949318885803 ], [ -0.13282746076583862, 0.7356611490249634, 0.08412784337997437 ], [ -0.39803752303123474, -1.0654767751693726, -1.0102325677871704 ], [ 0.06657543778419495, 0.17193874716758728, -0.563054621219635 ], [ 0.653261125087738, 1.6754300594329834, 1.1027677059173584 ], ... ],
    	[ [ 1.5047367811203003, -0.2529701590538025, -0.7200071215629578 ], [ -1.807608723640442, 0.559048056602478, -0.0054334606975317 ], [ -0.3648788630962372, -1.4211848974227905, 0.26337265968322754 ], [ -1.6105231046676636, 1.510939359664917, 0.3091653883457184 ], [ -0.0743689090013504, -0.07823678106069565, 1.9564504623413086 ], [ -0.9797886610031128, -0.03267301246523857, -0.15015867352485657 ], [ 0.19408459961414337, -0.026312561705708504, 1.2098561525344849 ], [ -0.12827110290527344, 0.3467918336391449, 0.09836817532777786 ], ... ],
    	[ [ 0.4383828043937683, 1.5329697132110596, 1.0254442691802979 ], [ 0.027303827926516533, 0.5772092938423157, 0.024929296225309372 ], [ 0.8511155247688293, 0.7086995840072632, 0.3019624948501587 ], [ 1.247290849685669, -2.1153030395507812, 0.4589805006980896 ], [ -0.7016096711158752, -1.7343964576721191, 0.3778062164783478 ], [ 0.23124966025352478, -2.422400712966919, -1.0074270963668823 ], [ -0.12015113234519958, 0.14185835421085358, 0.7067322134971619 ], [ 0.020036159083247185, 1.1319100856781006, -1.3986719846725464 ], ... ],
    	[ [ -1.2210665941238403, -0.2252630591392517, -0.03892482817173004 ], [ 0.40052059292793274, 0.18521179258823395, -1.0839232206344604 ], [ -1.9473401308059692, -0.7616400718688965, 0.35309526324272156 ], [ -0.6943474411964417, 0.698397696018219, -2.2494077682495117 ], [ -1.8254374265670776, 0.2825780212879181, -0.5278592705726624 ], [ 0.4853794276714325, 1.8696305751800537, 0.5958019495010376 ], [ -0.7575552463531494, -2.152587413787842, -0.6198601126670837 ], [ 0.5451574921607971, 0.12275219708681107, -0.2315288633108139 ], ... ],
    	...
    ]

TrainingTester.java:432 executed in 0.15 seconds (0.000 gc):

    return TestUtil.compare(title + " vs Iteration", runs);
Logging
Plotting range=[0.0, -0.07121819986979998], [2.0, 0.3211502422675081]; valueStats=DoubleSummaryStatistics{count=3, sum=5.038428, min=0.848754, average=1.679476, max=2.094837}
Only 1 points for GD
Only 1 points for CjGD
Only 1 points for LBFGS

Returns

Result

TrainingTester.java:435 executed in 0.02 seconds (0.000 gc):

    return TestUtil.compareTime(title + " vs Time", runs);
Logging
Plotting range=[-1.0, -0.07121819986979998], [1.0, 0.3211502422675081]; valueStats=DoubleSummaryStatistics{count=3, sum=5.038428, min=0.848754, average=1.679476, max=2.094837}
Only 1 points for GD
Only 1 points for CjGD
Only 1 points for LBFGS

Returns

Result

Results

TrainingTester.java:255 executed in 0.00 seconds (0.000 gc):

    return grid(inputLearning, modelLearning, completeLearning);

Returns

Result

TrainingTester.java:258 executed in 0.00 seconds (0.000 gc):

    return new ComponentResult(null == inputLearning ? null : inputLearning.value,
        null == modelLearning ? null : modelLearning.value, null == completeLearning ? null : completeLearning.value);

Returns

    {"input":{ "LBFGS": { "type": "NonConverged", "value": 2.0948370301768464 }, "CjGD": { "type": "NonConverged", "value": 0.8487539337439773 }, "GD": { "type": "NonConverged", "value": 2.0948370301768464 } }, "model":null, "complete":null}

LayerTests.java:425 executed in 0.00 seconds (0.000 gc):

    throwException(exceptions.addRef());

Results

details | result
{"input":{ "LBFGS": { "type": "NonConverged", "value": 2.0948370301768464 }, "CjGD": { "type": "NonConverged", "value": 0.8487539337439773 }, "GD": { "type": "NonConverged", "value": 2.0948370301768464 } }, "model":null, "complete":null}OK
  {
    "result": "OK",
    "performance": {
      "execution_time": "181.935",
      "gc_time": "16.734"
    },
    "created_on": 1586737777816,
    "file_name": "trainingTest",
    "report": {
      "simpleName": "Float",
      "canonicalName": "com.simiacryptus.mindseye.layers.cudnn.NProductLayerTest.Float",
      "link": "https://github.com/SimiaCryptus/mindseye-cudnn/tree/59d5b3318556370acb2d83ee6ec123ce0fc6974f/src/test/java/com/simiacryptus/mindseye/layers/cudnn/NProductLayerTest.java",
      "javaDoc": ""
    },
    "training_analysis": {
      "input": {
        "LBFGS": {
          "type": "NonConverged",
          "value": 2.0948370301768464
        },
        "CjGD": {
          "type": "NonConverged",
          "value": 0.8487539337439773
        },
        "GD": {
          "type": "NonConverged",
          "value": 2.0948370301768464
        }
      }
    },
    "archive": "s3://code.simiacrypt.us/tests/com/simiacryptus/mindseye/layers/cudnn/NProductLayer/Float/trainingTest/202004132937",
    "id": "3b7beb9b-7924-4662-b1dc-0d13ee9b5ab5",
    "report_type": "Components",
    "display_name": "Comparative Training",
    "target": {
      "simpleName": "NProductLayer",
      "canonicalName": "com.simiacryptus.mindseye.layers.cudnn.NProductLayer",
      "link": "https://github.com/SimiaCryptus/mindseye-cudnn/tree/59d5b3318556370acb2d83ee6ec123ce0fc6974f/src/main/java/com/simiacryptus/mindseye/layers/cudnn/NProductLayer.java",
      "javaDoc": ""
    }
  }