1. Test Modules
  2. Training Characteristics
    1. Input Learning
      1. Gradient Descent
      2. Conjugate Gradient Descent
      3. Limited-Memory BFGS
    2. Results
  3. Results

Subreport: Logs for com.simiacryptus.ref.lang.ReferenceCountingBase

Test Modules

Using Seed 2187361715699072000

Training Characteristics

Input Learning

In this test, we use a network to learn this target input, given its pre-evaluated output:

TrainingTester.java:332 executed in 0.05 seconds (0.000 gc):

    return RefArrays.stream(RefUtil.addRef(input_target)).flatMap(RefArrays::stream).map(x -> {
      try {
        return x.prettyPrint();
      } finally {
        x.freeRef();
      }
    }).reduce((a, b) -> a + "\n" + b).orElse("");

Returns

    [
    	[ [ -0.848, -1.252, 0.556 ], [ 0.6, 1.016, 1.484 ], [ 1.488, 0.148, 1.208 ], [ 1.1, 1.584, -1.548 ], [ -1.492, -0.936, -1.416 ], [ 0.424, -0.996, -0.18 ], [ 1.276, 1.452, 0.912 ], [ -1.32, -0.076, 1.936 ], ... ],
    	[ [ 0.672, -1.272, -0.116 ], [ -1.356, 1.96, 0.888 ], [ 1.656, 1.556, -0.552 ], [ 0.384, 0.02, 1.72 ], [ 0.472, -1.104, 1.86 ], [ 1.728, -1.324, -0.26 ], [ 1.392, 1.836, -1.66 ], [ 1.644, -1.108, -1.452 ], ... ],
    	[ [ -1.26, -1.796, -1.636 ], [ -0.08, -0.364, -1.112 ], [ -1.0, 1.048, 0.636 ], [ -1.26, 1.34, -1.552 ], [ 0.096, -0.58, -0.64 ], [ 1.896, 0.604, 0.352 ], [ -0.376, -1.816, -1.728 ], [ 0.688, 0.656, 0.372 ], ... ],
    	[ [ -1.168, 1.46, 0.72 ], [ -1.812, 1.052, -0.272 ], [ -0.196, 1.464, 0.728 ], [ 0.84, 0.116, 0.408 ], [ 1.812, -1.476, 0.584 ], [ 0.104, -0.352, 1.5 ], [ -1.58, -0.148, -1.74 ], [ -0.624, -0.76, -0.824 ], ... ],
    	[ [ -0.876, 1.892, 1.848 ], [ 1.288, 1.592, -1.16 ], [ 0.46, -1.728, -0.3 ], [ 1.084, -1.552, -1.128 ], [ 0.56, 0.348, 0.684 ], [ -1.304, 1.012, 0.128 ], [ 0.04, 1.86, 0.556 ], [ -0.228, 1.84, 0.236 ], ... ],
    	[ [ 1.236, 0.928, 0.52 ], [ -1.096, -1.884, -1.716 ], [ -1.916, 0.028, -1.1 ], [ -1.904, -1.324, -1.4 ], [ 1.016, 1.388, -0.732 ], [ 1.688, 0.744, -0.84 ], [ 0.8, 1.908, 1.704 ], [ -1.392, -0.692, -1.092 ], ... ],
    	[ [ -0.168, 0.072, 1.1 ], [ 0.124, -1.988, -1.032 ], [ -1.548, 1.836, 0.12 ], [ 0.784, 0.636, 1.348 ], [ -0.788, 0.984, -0.428 ], [ -1.296, 0.556, -0.328 ], [ -0.356, -1.512, 0.772 ], [ -0.344, 0.328, -1.948 ], ... ],
    	[ [ 1.244, 1.516, -1.568 ], [ 0.552, 1.092, 0.18 ], [ -0.232, -1.068, -0.44 ], [ -1.372, 0.584, -1.812 ], [ -0.332, -0.712, -0.04 ], [ 1.512, -0.04, 0.828 ], [ 1.292, 1.172, 0.104 ], [ -0.68, -1.512, -1.74 ], ... ],
    	...
    ]
    [
    	[ [ 0.824, 1.632, 1.62 ], [ -1.8, 1.28, 1.696 ], [ -1.992, 1.948, -0.392 ], [ 1.72, -0.224, -0.472 ], [ -0.988, 0.104, 1.08 ], [ 1.612, 1.724, -1.956 ], [ 1.252, -1.924, -0.64 ], [ 1.016, -1.956, 0.728 ], ... ],
    	[ [ -0.608, 0.192, -0.2 ], [ 0.004, -0.748, -0.62 ], [ 0.344, 0.936, 1.532 ], [ -1.96, 1.264, -1.408 ], [ -0.64, 0.916, -0.16 ], [ 1.996, 1.7, 0.816 ], [ 1.38, -1.324, -0.368 ], [ -0.824, -1.352, 1.084 ], ... ],
    	[ [ 1.672, -0.38, -0.448 ], [ 1.14, -1.64, -0.88 ], [ -1.712, 0.272, 1.96 ], [ 1.848, 0.1, -1.392 ], [ 0.72, 0.86, 0.916 ], [ 1.448, -1.012, -1.912 ], [ -0.4, 1.816, -1.684 ], [ -1.692, -0.368, 0.836 ], ... ],
    	[ [ -1.44, -0.224, 1.004 ], [ -0.352, -1.444, -1.068 ], [ -1.688, 1.164, 0.252 ], [ 1.948, -0.06, 0.728 ], [ 1.28, 0.568, 0.672 ], [ -0.056, -0.852, -1.192 ], [ 0.248, 0.008, -1.364 ], [ -0.832, 0.968, 0.864 ], ... ],
    	[ [ 1.208, 1.936, -0.66 ], [ -1.008, 1.648, -1.996 ], [ -1.052, -1.024, -1.656 ], [ 0.596, -0.02, 1.908 ], [ 0.732, 1.38, -1.24 ], [ -1.612, -1.14, 1.404 ], [ 1.168, 1.96, -0.244 ], [ 0.824, 1.9, 1.708 ], ... ],
    	[ [ -0.936, 0.556, -0.328 ], [ -0.32, -0.868, 0.028 ], [ -1.016, 1.98, -1.488 ], [ -1.28, -0.824, 0.596 ], [ 1.08, -0.696, 0.82 ], [ 0.628, -0.448, 0.684 ], [ -1.688, 1.38, 1.396 ], [ -0.22, -0.292, 1.284 ], ... ],
    	[ [ -1.984, 1.928, -0.372 ], [ 1.872, -0.012, -0.808 ], [ 1.356, 0.22, 0.052 ], [ 1.252, -1.116, -1.06 ], [ -1.788, 1.016, -1.564 ], [ -0.684, -1.476, 0.172 ], [ 1.864, -1.74, -0.304 ], [ 0.04, 1.132, 1.528 ], ... ],
    	[ [ -1.568, 1.716, -1.932 ], [ -0.556, 1.552, -1.148 ], [ -1.516, 0.332, 0.168 ], [ -0.696, 1.44, -1.896 ], [ 0.268, 0.384, 0.068 ], [ 0.596, 1.448, 1.276 ], [ -0.576, 0.196, 1.876 ], [ 0.032, 0.824, 0.216 ], ... ],
    	...
    ]
    [
    	[ [ 1.256, 0.608, -1.38 ], [ -1.368, -1.016, -1.588 ], [ -0.12, -1.164, -0.372 ], [ -1.856, -0.628, -1.82 ], [ 1.4, 1.544, 0.864 ], [ 0.3, -0.004, 1.956 ], [ -0.876, -1.516, -1.24 ], [ -1.7, -1.752, -1.26 ], ... ],
    	[ [ 1.432, 1.304, -0.544 ], [ -1.06, -1.4, -0.932 ], [ -0.332, -1.4, -0.652 ], [ -1.256, -0.276, -0.44 ], [ 1.696, -0.476, -1.648 ], [ 1.832, 1.872, 1.772 ], [ -1.276, -1.084, -1.876 ], [ 0.856, -0.528, 0.064 ], ... ],
    	[ [ 0.196, 1.408, 1.06 ], [ -0.812, 1.5, 1.74 ], [ -1.124, -1.94, -0.38 ], [ 0.84, 1.188, 0.308 ], [ -0.832, -0.596, 1.36 ], [ 1.108, -1.716, 1.904 ], [ -0.056, -1.452, -1.128 ], [ 1.716, 0.588, 0.8 ], ... ],
    	[ [ 0.292, 1.28, -1.044 ], [ -1.344, -1.788, 0.408 ], [ -1.52, 0.3, -1.452 ], [ 1.488, -0.688, 1.564 ], [ -0.12, 1.684, 0.94 ], [ -1.176, -1.588, 1.348 ], [ 1.296, -0.264, 0.696 ], [ -1.248, -0.64, 0.124 ], ... ],
    	[ [ 0.768, -0.504, 0.516 ], [ -0.772, 1.736, -1.8 ], [ -1.52, -1.608, -1.996 ], [ 0.672, 0.336, 1.628 ], [ 0.96, -1.956, -1.952 ], [ 0.276, -1.572, -1.108 ], [ 1.036, 1.392, -0.164 ], [ 0.44, -1.348, -1.556 ], ... ],
    	[ [ -0.372, 1.9, -0.756 ], [ 0.292, 0.088, 0.444 ], [ -1.568, -0.936, -0.632 ], [ 1.656, -1.172, -1.612 ], [ 1.676, 1.724, -0.916 ], [ 1.22, 0.6, 0.504 ], [ 1.816, 1.236, -0.616 ], [ -0.108, -1.66, -0.604 ], ... ],
    	[ [ 0.86, -1.768, -0.956 ], [ 1.828, 0.384, -0.432 ], [ 0.812, 0.868, -1.236 ], [ -1.528, -0.148, 1.236 ], [ -1.036, 1.28, 0.704 ], [ -0.768, -1.54, 0.536 ], [ 0.94, -1.824, 0.236 ], [ -0.656, -0.496, 1.684 ], ... ],
    	[ [ -1.12, -0.524, -0.912 ], [ -0.236, 1.644, 0.444 ], [ 0.156, 1.684, 0.096 ], [ 0.296, 0.328, -1.288 ], [ -1.684, 1.18, -0.892 ], [ 0.568, 0.068, 0.916 ], [ -0.1, -1.144, 1.62 ], [ -1.544, 0.716, -1.4 ], ... ],
    	...
    ]
    [
    	[ [ -1.532, -0.764, 0.572 ], [ -1.14, -0.648, 1.328 ], [ 0.468, -1.74, 1.64 ], [ 1.24, -0.612, 0.696 ], [ 0.12, -0.784, 1.444 ], [ -1.836, -1.256, 1.508 ], [ -1.44, -0.764, -1.112 ], [ 1.196, -0.52, -1.412 ], ... ],
    	[ [ -1.2, -0.704, 1.048 ], [ 0.8, 0.068, 0.996 ], [ 1.224, 0.448, -1.36 ], [ -0.804, -0.276, 1.404 ], [ -0.576, 0.236, 0.3 ], [ 1.344, 0.484, 0.108 ], [ -0.016, 1.164, 0.556 ], [ 1.712, -0.068, -1.94 ], ... ],
    	[ [ -0.216, -0.988, -1.108 ], [ -0.54, 1.32, 0.972 ], [ -1.372, -1.468, 0.496 ], [ 0.528, 1.368, -1.232 ], [ 0.828, -0.464, -1.428 ], [ -0.572, -0.544, -1.732 ], [ -0.412, -0.204, 0.42 ], [ 1.82, 0.028, 0.256 ], ... ],
    	[ [ -1.372, -0.536, -1.444 ], [ -1.912, -1.764, -1.476 ], [ -1.56, -0.888, -1.252 ], [ -1.048, 0.192, 1.92 ], [ 1.112, 1.176, -0.108 ], [ 0.396, -1.852, -0.212 ], [ -0.284, 0.508, -1.572 ], [ -1.392, 1.596, 1.564 ], ... ],
    	[ [ 1.128, -0.588, 0.94 ], [ 1.164, -1.384, -1.216 ], [ -0.28, 0.976, -1.728 ], [ 1.328, -1.916, 0.504 ], [ -0.464, 1.852, -1.744 ], [ -0.544, 0.444, -1.804 ], [ -1.336, 1.304, -1.856 ], [ -0.428, 1.628, 1.804 ], ... ],
    	[ [ -1.904, -1.488, 0.344 ], [ 0.872, -0.948, 1.496 ], [ -1.236, 1.38, 0.26 ], [ -1.28, -0.056, -1.404 ], [ 0.66, 0.864, -0.92 ], [ 1.276, 0.396, -0.488 ], [ -0.948, -0.128, -1.264 ], [ 1.708, 0.112, -0.62 ], ... ],
    	[ [ -0.376, -0.592, 1.7 ], [ 1.996, 0.012, -0.044 ], [ 0.784, 0.56, 0.184 ], [ 0.676, -0.232, -1.772 ], [ 1.444, -1.404, -0.904 ], [ -0.312, 1.88, 0.608 ], [ -0.076, 0.376, 0.052 ], [ 0.156, -1.892, 1.964 ], ... ],
    	[ [ 0.884, 1.124, 1.132 ], [ -1.94, 0.116, -0.496 ], [ 0.452, 0.42, 0.54 ], [ -1.116, -1.608, -0.98 ], [ 1.096, -0.28, -0.296 ], [ -1.576, -0.928, -1.776 ], [ -0.236, 0.324, -0.292 ], [ 0.504, -1.9, 1.572 ], ... ],
    	...
    ]
    [
    	[ [ 1.88, 1.424, -1.68 ], [ -1.724, -1.736, -0.244 ], [ 1.528, -1.572, -1.528 ], [ -0.564, -0.164, -0.504 ], [ -1.444, -0.956, -1.932 ], [ 1.1, 0.832, 1.708 ], [ -1.764, 1.896, -1.932 ], [ 0.836, 1.384, -1.28 ], ... ],
    	[ [ -1.396, -1.396, 0.244 ], [ -0.356, -1.376, 0.472 ], [ 1.48, -0.444, 1.656 ], [ -0.148, -0.508, 1.628 ], [ -1.188, 0.792, -1.66 ], [ -1.096, -1.208, -1.16 ], [ -0.948, 1.096, 1.316 ], [ -1.016, -0.1, -0.54 ], ... ],
    	[ [ -1.54, -0.32, 1.132 ], [ -1.66, -1.108, 0.0 ], [ -1.324, -1.604, -1.844 ], [ -1.3, -1.852, 1.78 ], [ -1.824, 1.236, 1.18 ], [ 1.504, 0.632, 0.124 ], [ 1.616, 1.048, -1.228 ], [ -1.3, -1.944, 0.764 ], ... ],
    	[ [ -0.608, -1.148, 1.392 ], [ -1.832, 1.868, -1.908 ], [ -1.012, -1.392, -0.876 ], [ 0.028, 0.892, 1.064 ], [ -0.928, 1.288, 0.156 ], [ -0.688, 0.488, 0.456 ], [ 1.688, -0.26, 1.988 ], [ -0.364, 0.44, -0.504 ], ... ],
    	[ [ 1.588, 0.26, 1.552 ], [ -0.116, -0.58, 0.888 ], [ -1.08, -1.524, -0.132 ], [ -1.576, -1.388, 1.012 ], [ -0.044, -1.092, 1.232 ], [ -1.504, -0.116, 1.688 ], [ 1.556, -0.272, -0.784 ], [ 1.108, -0.272, -1.228 ], ... ],
    	[ [ 1.688, -1.58, 1.596 ], [ 0.3, -1.212, 0.78 ], [ 0.056, 0.716, -0.34 ], [ -1.32, 0.26, -0.728 ], [ 1.608, 1.976, -0.016 ], [ -0.436, 1.172, 0.468 ],

...skipping 992 bytes...

    168, 1.524 ], [ 1.728, 1.58, -1.548 ], [ -0.48, 1.98, -0.636 ], [ -0.2, -1.664, 0.844 ], [ -0.06, -1.628, 0.524 ], [ 1.16, -0.912, -0.784 ], [ 0.32, 1.684, 1.66 ], ... ],
    	[ [ -1.024, 1.56, 0.272 ], [ -1.164, 0.168, 1.244 ], [ -1.512, 0.216, 1.284 ], [ -0.512, -1.464, -0.452 ], [ -1.86, 0.108, -0.544 ], [ -1.228, -1.34, 0.848 ], [ -0.288, -0.108, -0.348 ], [ -0.9, 1.724, 1.452 ], ... ],
    	[ [ -0.288, 0.8, -1.184 ], [ -1.848, 0.696, -0.048 ], [ -1.172, 1.292, 1.752 ], [ -0.176, -0.636, -0.392 ], [ 1.164, -0.224, 1.512 ], [ 0.084, 0.344, 0.216 ], [ -1.32, 0.064, -0.52 ], [ 0.032, 0.592, 0.268 ], ... ],
    	[ [ 0.432, -0.108, 0.296 ], [ -1.816, -0.152, 0.152 ], [ 1.468, 0.548, 1.704 ], [ -0.316, 0.4, 0.76 ], [ -0.272, -0.504, 1.84 ], [ 0.3, 0.44, 1.06 ], [ 0.996, 0.136, 1.46 ], [ 0.024, -1.696, 1.948 ], ... ],
    	[ [ -1.384, -1.416, 1.764 ], [ -0.788, 1.26, 0.676 ], [ -1.768, -0.416, 0.16 ], [ -1.088, -1.988, 0.74 ], [ -1.0, -0.484, -0.848 ], [ -0.264, 1.38, 1.38 ], [ 1.248, -0.832, -1.94 ], [ -0.62, 1.26, -0.12 ], ... ],
    	[ [ -0.908, -0.844, -0.66 ], [ 0.816, -0.468, -0.164 ], [ 1.504, 1.56, -0.392 ], [ -0.332, 1.536, 1.36 ], [ -0.22, -0.76, 0.088 ], [ -0.32, -0.516, -1.708 ], [ 1.176, -0.988, 0.444 ], [ 1.016, 1.66, -0.396 ], ... ],
    	...
    ]
    [
    	[ [ 1.332, -0.172, 1.352 ], [ -0.028, -1.124, 1.16 ], [ -0.008, 1.348, 1.176 ], [ -1.364, 1.84, 1.572 ], [ -0.744, 1.008, -1.308 ], [ 1.584, -1.764, 0.688 ], [ 0.668, -0.184, -0.016 ], [ -1.004, -0.316, 1.556 ], ... ],
    	[ [ -1.24, 1.14, 0.48 ], [ -0.564, -1.108, 1.392 ], [ -0.876, 0.572, -0.432 ], [ 1.708, -0.632, 0.94 ], [ -1.568, -0.084, 0.744 ], [ -0.36, 0.404, -0.348 ], [ -0.988, -1.172, 1.484 ], [ -1.14, 1.984, 0.38 ], ... ],
    	[ [ 0.08, 0.916, 1.732 ], [ -0.192, 0.008, 0.536 ], [ 1.6, 1.8, -1.46 ], [ -1.564, -0.916, -0.388 ], [ -0.824, -0.052, -1.296 ], [ -1.592, -1.968, -1.648 ], [ -0.952, 0.6, 1.188 ], [ 1.192, 0.06, -1.536 ], ... ],
    	[ [ 0.772, -0.416, -0.176 ], [ -1.884, -0.392, -0.124 ], [ -0.336, -1.196, 1.832 ], [ 1.456, 0.656, 1.788 ], [ -0.852, -1.652, -1.816 ], [ 0.388, 1.668, 1.708 ], [ 0.448, -0.848, -1.856 ], [ -1.992, 0.708, 0.72 ], ... ],
    	[ [ 0.036, 0.296, -1.208 ], [ -0.444, -0.564, 1.524 ], [ -1.308, 1.88, 0.604 ], [ -1.144, -1.46, 0.716 ], [ 0.536, 1.272, -1.44 ], [ -0.8, 0.892, -1.12 ], [ 0.06, 0.1, -0.548 ], [ 0.992, -0.296, 1.904 ], ... ],
    	[ [ -0.104, -0.316, 1.212 ], [ 0.264, 0.876, 0.676 ], [ -1.26, 1.68, 0.632 ], [ 0.48, -0.468, 1.056 ], [ 0.144, -0.772, -0.048 ], [ 1.76, -1.736, -0.22 ], [ -1.024, 1.244, -0.324 ], [ 1.98, -0.128, -0.884 ], ... ],
    	[ [ -1.492, 0.704, -1.28 ], [ -0.948, -0.568, -1.184 ], [ -1.78, 1.652, 0.316 ], [ 0.716, 0.052, 0.284 ], [ 1.74, 0.232, -0.468 ], [ 0.104, 1.804, 1.292 ], [ 0.084, 1.692, 1.912 ], [ -1.572, -1.368, -0.54 ], ... ],
    	[ [ -1.348, 0.428, -1.864 ], [ -0.576, -1.504, -0.396 ], [ 0.364, -0.892, 1.228 ], [ -1.252, -0.524, 1.06 ], [ -1.78, 1.872, 0.228 ], [ 0.3, 0.044, 0.812 ], [ -1.312, -0.8, 0.848 ], [ -0.896, -1.272, -0.16 ], ... ],
    	...
    ]
    [
    	[ [ 1.02, -1.764, 0.096 ], [ -0.78, -0.82, 1.832 ], [ 0.072, -0.8, 1.508 ], [ -1.932, 1.912, 0.448 ], [ -0.572, 0.984, 1.132 ], [ -0.044, 0.044, 0.232 ], [ -0.572, -1.304, 0.724 ], [ 1.304, -1.672, -1.196 ], ... ],
    	[ [ 0.42, 0.428, -1.224 ], [ -0.044, -1.388, 1.056 ], [ -1.228, -0.368, 0.644 ], [ 0.464, -1.756, 1.344 ], [ 1.44, 0.768, 0.232 ], [ 0.128, 1.768, 0.316 ], [ -1.88, -1.256, -0.124 ], [ 1.5, 0.832, 0.408 ], ... ],
    	[ [ 1.556, -0.972, -1.468 ], [ 0.56, -1.844, 0.32 ], [ -0.932, 1.912, 1.488 ], [ 0.896, 1.256, 1.16 ], [ 1.724, 1.208, -0.364 ], [ 0.988, 0.276, 1.748 ], [ 1.056, 1.7, -1.752 ], [ -1.22, 1.488, -1.464 ], ... ],
    	[ [ 1.148, 1.116, 0.428 ], [ -1.028, 0.048, 0.704 ], [ 1.968, -1.604, -0.628 ], [ 0.376, 1.456, 0.856 ], [ 0.976, 0.792, -1.196 ], [ 1.156, 1.888, -1.92 ], [ -0.848, 0.916, -0.88 ], [ -0.72, 1.964, -0.108 ], ... ],
    	[ [ -1.576, -1.792, -0.924 ], [ -1.276, -0.652, 1.744 ], [ -1.84, 1.8, 1.372 ], [ -0.54, 2.0, -1.336 ], [ -1.996, -0.208, -1.816 ], [ -0.736, 0.988, 0.364 ], [ -1.872, -0.9, 1.024 ], [ 1.696, -1.732, -0.852 ], ... ],
    	[ [ -1.512, 1.872, -0.34 ], [ -1.144, 1.92, 1.316 ], [ -0.688, -1.132, 1.612 ], [ 0.156, -1.064, -0.936 ], [ -0.744, 0.72, -1.84 ], [ -1.036, 0.972, -0.76 ], [ -0.512, -1.668, -1.788 ], [ 0.124, -1.904, -1.828 ], ... ],
    	[ [ 1.62, 0.1, 0.584 ], [ -1.268, 0.448, 1.016 ], [ -1.836, -1.32, 0.86 ], [ -0.808, 1.9, 1.528 ], [ 0.652, 0.388, 0.488 ], [ -1.076, -0.156, 0.544 ], [ 1.932, 1.68, 0.912 ], [ 0.468, 1.348, -0.336 ], ... ],
    	[ [ 0.772, -1.02, 1.48 ], [ 1.328, -0.28, 0.868 ], [ -0.092, -1.636, 1.856 ], [ 0.42, -1.9, -0.284 ], [ -0.98, -0.732, 0.952 ], [ -1.48, 0.468, -0.704 ], [ 1.052, 1.212, -0.34 ], [ -1.628, -1.356, -1.164 ], ... ],
    	...
    ]
    [
    	[ [ 0.108, 1.624, 0.408 ], [ 1.308, -1.056, -1.4 ], [ 0.652, 0.516, -1.544 ], [ -0.34, -0.904, -1.144 ], [ 0.9, -0.904, 1.0 ], [ -1.228, 0.636, -1.236 ], [ 0.748, 1.552, -1.088 ], [ -1.024, -0.188, 1.356 ], ... ],
    	[ [ 1.332, 1.848, -0.34 ], [ 0.548, 0.116, -0.012 ], [ 0.248, 0.116, -1.696 ], [ -0.104, 0.72, 0.228 ], [ 0.168, -1.968, -1.6 ], [ -1.404, 0.892, 1.336 ], [ -0.264, -0.388, 1.856 ], [ -1.132, 1.72, -0.296 ], ... ],
    	[ [ -0.356, 1.548, 1.9 ], [ 0.792, 0.844, 0.712 ], [ -1.1, 1.276, -0.44 ], [ -1.24, 0.868, -0.372 ], [ -0.144, 0.548, -0.38 ], [ 0.888, -1.636, -0.568 ], [ -0.8, -0.476, -0.972 ], [ 0.684, 1.64, 1.596 ], ... ],
    	[ [ 0.06, -1.932, 0.2 ], [ 0.932, -1.224, 0.82 ], [ 0.732, -1.636, 0.252 ], [ 0.588, -1.512, -0.94 ], [ -1.112, 0.208, 1.948 ], [ -0.664, -1.816, 1.776 ], [ -0.832, 1.472, 0.044 ], [ -1.892, 0.852, -0.04 ], ... ],
    	[ [ -1.4, -0.084, -1.292 ], [ -1.68, 1.192, -1.292 ], [ -0.548, -0.54, -0.7 ], [ -1.024, 0.54, -0.896 ], [ -0.956, -1.412, 0.972 ], [ 1.076, -0.416, -0.568 ], [ -1.652, 1.08, 0.636 ], [ 0.04, -0.516, -1.644 ], ... ],
    	[ [ -1.668, 1.704, -0.832 ], [ 0.06, 1.98, -0.316 ], [ 0.092, 1.408, 1.332 ], [ 1.868, -1.552, -0.748 ], [ 1.82, 0.184, 1.304 ], [ -1.96, 1.864, 0.628 ], [ -0.736, 0.956, -1.008 ], [ 1.676, -0.264, -1.4 ], ... ],
    	[ [ 0.972, 0.396, 1.676 ], [ -0.232, -0.088, -0.616 ], [ -1.264, -1.804, -1.964 ], [ 0.228, 0.848, 0.608 ], [ 1.612, 1.616, -1.232 ], [ -0.82, 0.072, -1.208 ], [ 0.228, 1.388, -1.532 ], [ 1.684, 1.608, -1.36 ], ... ],
    	[ [ -0.744, 0.24, 0.756 ], [ 0.464, -1.176, 0.444 ], [ 0.848, 1.22, -1.924 ], [ 1.42, 0.96, -1.996 ], [ 1.884, 0.58, -0.556 ], [ -0.172, -1.944, -0.176 ], [ 0.66, -0.944, 1.816 ], [ -0.396, 1.676, -0.704 ], ... ],
    	...
    ]
    [
    	[ [ 0.896, 0.844, -0.916 ], [ -0.352, 0.356, -1.28 ], [ -1.256, -1.512, 0.408 ], [ -1.08, 1.232, 0.88 ], [ -0.436, 0.876, -0.788 ], [ 0.768, 0.38, 0.208 ], [ 1.276, 1.988, 0.968 ], [ 1.72, -0.552, 0.892 ], ... ],
    	[ [ 1.608, -1.932, 1.436 ], [ 1.872, -0.836, -1.048 ], [ -1.724, 0.636, -0.656 ], [ -0.536, -1.468, -0.672 ], [ -0.408, -1.716, 0.9 ], [ 1.968, 1.536, -0.86 ], [ -0.228, -1.056, -1.72 ], [ 1.204, 1.36, 0.396 ], ... ],
    	[ [ -1.416, 0.784, -0.052 ], [ -1.236, -0.46, -0.216 ], [ -0.576, 1.664, 1.556 ], [ 1.416, -1.552, -0.976 ], [ -0.964, -0.8, -1.988 ], [ 0.068, 0.86, 0.084 ], [ -1.12, 0.212, 1.524 ], [ 1.5, -1.08, -1.908 ], ... ],
    	[ [ 1.676, 1.084, 0.324 ], [ -1.356, -1.248, 1.04 ], [ -0.144, 1.768, -0.048 ], [ -1.424, 1.116, 0.052 ], [ 0.692, -1.156, 0.84 ], [ 1.648, -0.744, -1.4 ], [ -0.96, -1.12, -1.572 ], [ -1.612, 1.16, -1.74 ], ... ],
    	[ [ -1.608, -0.028, 0.036 ], [ 0.748, -1.556, -1.172 ], [ 0.324, 1.26, 1.36 ], [ 0.98, 1.796, -0.232 ], [ 1.316, -0.98, 1.104 ], [ -0.924, 1.124, 1.596 ], [ 0.748, -0.264, -0.104 ], [ -0.832, -0.66, -1.232 ], ... ],
    	[ [ 0.156, -1.728, -0.912 ], [ 0.892, 0.728, 1.28 ], [ 1.224, 1.676, -1.988 ], [ -0.376, -0.836, -0.552 ], [ 1.064, 1.104, -1.18 ], [ 1.536, 0.44, 1.472 ], [ -1.932, 1.744, 1.732 ], [ 0.376, -0.988, 1.976 ], ... ],
    	[ [ -1.296, 0.14, 0.668 ], [ -1.148, 1.356, 1.596 ], [ 0.0, -1.192, 1.132 ], [ -1.508, -0.696, 1.88 ], [ -0.936, -0.58, 0.972 ], [ 0.752, 0.444, -1.112 ], [ -1.02, -0.272, 0.548 ], [ 0.528, 0.212, 1.724 ], ... ],
    	[ [ -1.168, 0.436, 1.192 ], [ -0.58, 0.588, -1.816 ], [ -1.168, -0.052, 1.216 ], [ -1.456, -1.032, -0.244 ], [ 1.744, -1.0, 0.492 ], [ 0.8, 1.416, 0.928 ], [ 0.736, 0.064, -1.572 ], [ -1.624, 1.364, -1.1 ], ... ],
    	...
    ]

Gradient Descent

First, we train using the basic gradient descent method, applying weak line search conditions.

TrainingTester.java:480 executed in 33.26 seconds (4.065 gc):

    IterativeTrainer iterativeTrainer = new IterativeTrainer(trainable.addRef());
    try {
      iterativeTrainer.setLineSearchFactory(label -> new ArmijoWolfeSearch());
      iterativeTrainer.setOrientation(new GradientDescent());
      iterativeTrainer.setMonitor(TrainingTester.getMonitor(history));
      iterativeTrainer.setTimeout(30, TimeUnit.SECONDS);
      iterativeTrainer.setMaxIterations(250);
      iterativeTrainer.setTerminateThreshold(0);
      return iterativeTrainer.run();
    } finally {
      iterativeTrainer.freeRef();
    }
Logging
Reset training subject: 2653317452023
Reset training subject: 2658008453924
Constructing line search parameters: GD
th(0)=3.554075794831487;dx=-2.456767219532909E-6
New Minimum: 3.554075794831487 > 3.55407050189102
WOLFE (weak): th(2.154434690031884)=3.55407050189102; dx=-2.456763454029632E-6 evalInputDelta=5.292940466805618E-6
New Minimum: 3.55407050189102 > 3.5540652089586664
WOLFE (weak): th(4.308869380063768)=3.5540652089586664; dx=-2.4567596885309497E-6 evalInputDelta=1.0585872820545461E-5
New Minimum: 3.5540652089586664 > 3.554044037310374
WOLFE (weak): th(12.926608140191302)=3.554044037310374; dx=-2.456744626582175E-6 evalInputDelta=3.175752111284069E-5
New Minimum: 3.554044037310374 > 3.55394876649932
WOLFE (weak): th(51.70643256076521)=3.55394876649932; dx=-2.4566768487225466E-6 evalInputDelta=1.2702833216682308E-4
New Minimum: 3.55394876649932 > 3.553440699896123
WOLFE (weak): th(258.53216280382605)=3.553440699896123; dx=-2.4563153919489498E-6 evalInputDelta=6.350949353639557E-4
New Minimum: 3.553440699896123 > 3.550266976928362
WOLFE (weak): th(1551.1929768229563)=3.550266976928362; dx=-2.454057246387296E-6 evalInputDelta=0.003808817903125039
New Minimum: 3.550266976928362 > 3.5275021791004293
WOLFE (weak): th(10858.350837760694)=3.5275021791004293; dx=-2.4378473559825956E-6 evalInputDelta=0.026573615731057654
New Minimum: 3.5275021791004293 > 3.3471512807343977
WOLFE (weak): th(86866.80670208555)=3.3471512807343977; dx=-2.3086367041962617E-6 evalInputDelta=0.20692451409708923
New Minimum: 3.3471512807343977 > 2.094449815058231
END: th(781801.26031877)=2.094449815058231; dx=-1.3632877039585771E-6 evalInputDelta=1.4596259797732558
Fitness changed from 3.554075794831487 to 2.094449815058231
Iteration 1 complete. Error: 2.094449815058231 Total: 33.2449; Orientation: 1.2482; Line Search: 25.3727
Final threshold in iteration 1: 2.094449815058231 (> 0.0) after 33.247s (< 30.000s)

Returns

    2.094449815058231

This training run resulted in the following configuration:

TrainingTester.java:610 executed in 0.00 seconds (0.000 gc):

    RefList<double[]> state = network.state();
    assert state != null;
    String description = state.stream().map(RefArrays::toString).reduce((a, b) -> a + "\n" + b)
        .orElse("");
    state.freeRef();
    return description;

Returns

    

And regressed input:

TrainingTester.java:622 executed in 0.01 seconds (0.000 gc):

    return RefArrays.stream(RefUtil.addRef(data)).flatMap(x -> {
      return RefArrays.stream(x);
    }).limit(1).map(x -> {
      String temp_18_0015 = x.prettyPrint();
      x.freeRef();
      return temp_18_0015;
    }).reduce((a, b) -> a + "\n" + b).orElse("");

Returns

    [
    	[ [ 1.344264774945441, -0.13154753895252708, 0.005245628964374312 ], [ -0.9916004420625937, 1.2054323077186737, 1.325690394052103 ], [ -0.5691139525127251, -1.265421339372395, 0.21941943972052147 ], [ -0.8127329186758179, -0.7939951568163336, -1.0886802872732353 ], [ -0.44955614911410985, 1.3365659474863727, -1.894271310983701 ], [ 0.6071847787109818, -0.9371473794979388, -1.2588326139177448 ], [ 0.909964913364151, 1.3050025502391918, -0.3361240957428801 ], [ 1.6325103611674034, -0.32790545676975047, 0.12868536419440949 ], ... ],
    	[ [ -1.036256158586689, -0.49289188258695654, -0.587004725717368 ], [ -1.2707328025938032, -0.12820271255625887, 0.527527231648786 ], [ 1.1933662631052029, -0.8967745621242299, -1.506852396737469 ], [ -1.4185351208867936, 0.5199063106082474, -1.8072239019303362 ], [ 0.5357946706857717, 0.9653013396526506, -0.5757016168292926 ], [ -0.5647084740860518, -0.9557196065419874, 1.7978057630086297 ], [ -1.642641862818869, 0.15556844143802448, -1.0689907068182571 ], [ -0.15521477966216943, 1.560804285739631, -0.8390453381486931 ], ... ],
    	[ [ -0.11712736391920744, 1.3101710886509899, 1.4335833386591201 ], [ -0.2905439587008597, -0.17948129619568517, 0.6919024101713702 ], [ -1.6416100602815984, -0.27492380551168805, 0.4638910932958972 ], [ 0.657908125768136, 1.4607440990498177, 0.41713468547149735 ], [ -0.029481359344500184, -0.30033123198192674, -1.190367341945115 ], [ 1.6926196666528708, 1.1543187747355053, 1.255292224462348 ], [ -0.3188625312863202, 0.48007248002090624, -0.6744059307910044 ], [ 1.53788644970773, 0.008160055727311892, 1.3577192925787747 ], ... ],
    	[ [ 0.1874988750614104, 0.699912591430141, -0.13870828151619466 ], [ 1.5846612448884558, 0.536703079356791, 0.03235105420277837 ], [ -0.258049813227802, 0.4675238619427501, -1.263860710952063 ], [ -1.6614050654596193, 1.5722796653457118, 0.9115688140410729 ], [ -1.2999933299339124, 1.6352555210813589, -0.5758912824161754 ], [ 1.3297781677114018, 0.6807708228719966, -0.7506688650101244 ], [ 1.5551426704921276, -0.2691954000257784, -1.182647298591864 ], [ 0.891991072980153, -1.3955985135017197, 1.075376740066979 ], ... ],
    	[ [ 0.9021006356389875, -0.7316013163609989, -0.45163685978005336 ], [ -0.8016229783726881, 0.35433782490527127, 1.4535564075373555 ], [ -0.9368120824280056, 0.10375533758617489, 1.8623957865288436 ], [ 1.009076990341874, -0.1917732104575104, 1.4595240260654625 ], [ 0.79942556616169, 0.016549605684874694, 0.39522233582088945 ], [ 1.1258142653780572, 1.2115179425963505, -1.2140519597396218 ], [ -1.8318680017453624, -0.17769714292044547, -0.8944342853691574 ], [ 1.470480599876948, -1.5803997639179859, 1.3565145736796207 ], ... ],
    	[ [ -1.4421786671457193, -1.8615777541715866, 0.2047305197305382 ], [ 1.0070327630967508, 0.6018205802871701, -1.8755832874663323 ], [ -0.6556227795129896, 1.602605377469726, 1.8543022102692654 ], [ -0.5672204621737601, -0.5361971559613423, 0.25972457673040655 ], [ 0.49642148740720593, -1.5955964207085556, -0.3738231496972255 ], [ -0.3594347783023105, 1.2573689094127345, -1.1929337783305727 ], [ -0.7169837146491255, -0.7238735405992832, 1.8573933666487443 ], [ -1.6746098717601485, -0.15988873997502556, 0.35147293475581043 ], ... ],
    	[ [ 0.19590326049116022, 0.28884209377351455, 1.8766326961239506 ], [ 0.9896148782224325, -0.3857885379808727, 0.19994595661165676 ], [ -1.5477898867512574, 1.0858480482821566, -1.2903916628179077 ], [ -0.4656599115510719, -1.6560998461443948, -0.7887309231734771 ], [ -0.2017642046236362, 0.8984897411098778, -1.494997258521729 ], [ -1.2886243511777606, -0.7143379746674388, 0.6368647831192168 ], [ 0.20141368848916893, 1.639055743443434, 1.3153002552143969 ], [ -0.9522053210817251, -0.15742317424205238, 0.23319686380187152 ], ... ],
    	[ [ 1.004711732873953, 1.2508339410305647, -1.315450740111068 ], [ 0.8393914956657385, -1.4802714579865062, 0.610260038223152 ], [ -0.06142066025061431, 0.9115089744231918, 0.9990790951980765 ], [ -1.2045985448451995, -1.7682124059753448, -0.7583405577570275 ], [ 1.668098508175398, 1.9189859516452825, -0.48821094661299225 ], [ -1.4516977889713902, 1.0762004725723786, -0.11869768412522431 ], [ 1.3889025098941057, -0.8841196346589826, 0.09530183382262665 ], [ 0.05967933407861947, -0.26976755087947357, 0.1114494525468365 ], ... ],
    	...
    ]

To produce the following output:

TrainingTester.java:633 executed in 0.09 seconds (0.000 gc):

    Result[] array = ConstantResult.batchResultArray(pop(RefUtil.addRef(data)));
    @Nullable
    Result eval = layer.eval(array);
    assert eval != null;
    TensorList tensorList = Result.getData(eval);
    String temp_18_0016 = tensorList.stream().limit(1).map(x -> {
      String temp_18_0017 = x.prettyPrint();
      x.freeRef();
      return temp_18_0017;
    }).reduce((a, b) -> a + "\n" + b).orElse("");
    tensorList.freeRef();
    return temp_18_0016;

Returns

    [
    	[ [ 0.8125550474244345, 0.21834705081943898, -0.003882708485070708 ], [ -1.1956343375804812, 0.8146152662426138, 2.4522115107830365 ], [ 0.23322088458589796, -1.7918863697262257, 0.3507979924876409 ], [ -0.9932824745021016, -0.09194594288775589, 0.3412807027708602 ], [ 0.21677001022027376, -0.6651452486133624, -0.1094924350289605 ], [ -0.9195489089824709, -1.6893629670905377, -1.5664594566072416 ], [ 1.7576032459351814, -1.9817664619934514, -0.6061223327796749 ], [ 0.49068360384860255, -0.5443492400027574, -0.11026865502910323 ], ... ],
    	[ [ -0.8503296054069461, 0.3835302587195772, -0.3012699867516131 ], [ 0.7944268293060278, -0.1548688767679607, 0.4559526254885352 ], [ -0.9476027112052298, -1.2931039613719224, -1.4713907167193063 ], [ -0.6915055241658118, 0.5611120807109287, -0.3335420766321503 ], [ 0.4528093712375245, 1.1267196260115415, -0.5883253585422626 ], [ 0.3033496916987891, -1.5561164670596137, -1.6338905695022616 ], [ 1.898501377805742, -0.05248336870630959, -1.1328156926484343 ], [ -0.0011348988962938363, -0.7745620612730866, -0.13054655732296347 ], ... ],
    	[ [ 0.13386712060574216, 1.3064533364901838, 0.6948917155212004 ], [ 0.5332518476886837, 0.2032678675170611, 1.1540642818925724 ], [ 1.998063289158709, 0.4343965258042818, 0.8897988796099989 ], [ 0.8929953260631985, -1.6838317657491533, -0.4058642469585047 ], [ 0.010145142351834514, -0.38046250220836764, 0.9671613107991555 ], [ 1.7736319651103833, -1.5029678085357545, -1.5052579751598225 ], [ -0.026741007650276424, 0.42632966819557444, 1.2451489793179222 ], [ -1.7521764396449402, -0.005613541440783709, -0.2258101098650273 ], ... ],
    	[ [ 0.28874826759457206, 0.7681417512752859, -0.25171107388200964 ], [ -1.4616001200237854, -1.039852739140279, -0.03850748336080192 ], [ 0.4758469481342958, 0.7908176876454011, -0.14846523118916438 ], [ -0.08193568010095229, -0.47937669447399595, -1.3871958439669068 ], [ -0.22851346460621688, 0.6642386368060444, 0.5083203686678047 ], [ 1.1948116888240237, -0.18955304285606236, 0.1432617810875124 ], [ 1.0262314955555825, 0.19485557867562087, 0.7307875200508105 ], [ 1.574810290669896, 1.3944697645553439, 0.01891156556942275 ], ... ],
    	[ [ 0.29884464638842884, 1.0259926249987112, 0.14032706947412174 ], [ -1.5406561820157119, 0.3833075540137657, 0.6779857487764677 ], [ -0.7210184921419444, -0.08710405954557787, 0.3200149750450222 ], [ 1.6423187473188499, 0.04445539131552606, 0.5956009446340577 ], [ -0.07541777692749832, -0.0071304890681924485, -0.3955519708082775 ], [ 0.20482635855363218, -1.467775725232894, 0.2793896917189719 ], [ 0.9549447125333189, -0.21824155810159016, 1.5510403681125076 ], [ 1.7508324644960127, 0.5609886247656486, 1.6205901949688017 ], ... ],
    	[ [ -1.8571015686220755, 1.3430174127337022, -0.15809148218477048 ], [ -1.3332570048167987, 1.1039562569933974, 0.31661525641435057 ], [ -0.9065878467202246, -1.0752784129245554, 1.6549334773828381 ], [ 0.8101338179846413, 0.11047994420590591, 0.3450575672066179 ], [ -0.8204482985038909, -1.1443494095770346, -0.16434273052153195 ], [ 0.23573220745133455, 0.8851006600225123, -0.8703577969210076 ], [ -0.30026016487596485, -0.9107800088194504, 2.039316996948826 ], [ -1.4461643705934497, 0.23464323401197726, 0.30521921606679076 ], ... ],
    	[ [ 1.4501463283683985E-4, 0.03184356513739446, -0.1378494565799803 ], [ 1.2566847939608725, -0.4139930628855208, 0.21339288837632803 ], [ -1.8577954836893653, -0.9746004554348439, -1.8678496349859004 ], [ -0.5074065731430781, 0.017308230494978056, 0.5744051440470703 ], [ -0.1581952573817418, 0.38629263186672136, -1.1990136014111643 ], [ -0.9617464103223248, 0.8407176492359295, 1.0582584500062344 ], [ 0.2871887844673814, 1.7670524415515863, 0.7637707152561602 ], [ 1.1639651481476423, 0.17791376039896933, -0.4234455317137493 ], ... ],
    	[ [ -0.026430084498904743, 0.6367248625708862, 0.753002866670485 ], [ 0.9390215974195528, 1.995793703439195, 0.6119375696924607 ], [ -0.05878383936366887, -1.0237317925387774, 0.872979813538962 ], [ -1.3205666729138588, 1.0364521972144347, 0.8195406468406797 ], [ 1.3674356729305144, -0.016645900189796507, 0.27223731687182906 ], [ 2.3370943849803085, 1.2891629187643134, -0.024843902797957982 ], [ 0.22711702878382495, -1.464554212620702, 0.1330852262610241 ], [ -0.030314841421153295, 0.3728400468339831, 0.17948413512101552 ], ... ],
    	...
    ]

Conjugate Gradient Descent

First, we use a conjugate gradient descent method, which converges the fastest for purely linear functions.

TrainingTester.java:452 executed in 70.37 seconds (4.953 gc):

    IterativeTrainer iterativeTrainer = new IterativeTrainer(trainable.addRef());
    try {
      iterativeTrainer.setLineSearchFactory(label -> new QuadraticSearch());
      iterativeTrainer.setOrientation(new GradientDescent());
      iterativeTrainer.setMonitor(TrainingTester.getMonitor(history));
      iterativeTrainer.setTimeout(30, TimeUnit.SECONDS);
      iterativeTrainer.setMaxIterations(250);
      iterativeTrainer.setTerminateThreshold(0);
      return iterativeTrainer.run();
    } finally {
      iterativeTrainer.freeRef();
    }
Logging
Reset training subject: 2687026040125
Reset training subject: 2687984452232
Constructing line search parameters: GD
F(0.0) = LineSearchPoint{point=PointSample{avg=3.554075794831487}, derivative=-2.456767219532909E-6}
F(1.0E-10) = LineSearchPoint{point=PointSample{avg=3.554075794831487}, derivative=-2.456767219532909E-6}, evalInputDelta = 0.0
New Minimum: 3.554075794831487 > 3.5540757948314856
F(7.000000000000001E-10) = LineSearchPoint{point=PointSample{avg=3.5540757948314856}, derivative=-2.456767219532908E-6}, evalInputDelta = -1.3322676295501878E-15
New Minimum: 3.5540757948314856 > 3.554075794831475
F(4.900000000000001E-9) = LineSearchPoint{point=PointSample{avg=3.554075794831475}, derivative=-2.4567672195329007E-6}, evalInputDelta = -1.199040866595169E-14
New Minimum: 3.554075794831475 > 3.554075794831403
F(3.430000000000001E-8) = LineSearchPoint{point=PointSample{avg=3.554075794831403}, derivative=-2.456767219532849E-6}, evalInputDelta = -8.393286066166183E-14
New Minimum: 3.554075794831403 > 3.554075794830897
F(2.4010000000000004E-7) = LineSearchPoint{point=PointSample{avg=3.554075794830897}, derivative=-2.4567672195324895E-6}, evalInputDelta = -5.897504706808832E-13
New Minimum: 3.554075794830897 > 3.554075794827358
F(1.6807000000000003E-6) = LineSearchPoint{point=PointSample{avg=3.554075794827358}, derivative=-2.4567672195299717E-6}, evalInputDelta = -4.129141473185882E-12
New Minimum: 3.554075794827358 > 3.5540757948025843
F(1.1764900000000001E-5) = LineSearchPoint{point=PointSample{avg=3.5540757948025843}, derivative=-2.4567672195123462E-6}, evalInputDelta = -2.8902658044671625E-11
New Minimum: 3.5540757948025843 > 3.554075794629162
F(8.235430000000001E-5) = LineSearchPoint{point=PointSample{avg=3.554075794629162}, derivative=-2.4567672193889708E-6}, evalInputDelta = -2.0232482356163928E-10
New Minimum: 3.554075794629162 > 3.5540757934152096
F(5.764801000000001E-4) = LineSearchPoint{point=PointSample{avg=3.5540757934152096}, derivative=-2.456767218525341E-6}, evalInputDelta = -1.4162773176451537E-9
New Minimum: 3.5540757934152096 > 3.5540757849175457
F(0.004035360700000001) = LineSearchPoint{point=PointSample{avg=3.5540757849175457}, derivative=-2.456767212479935E-6}, evalInputDelta = -9.913941223516076E-9
New Minimum: 3.5540757849175457 > 3.554075725433895
F(0.028247524900000005) = LineSearchPoint{point=PointSample{avg=3.554075725433895}, derivative=-2.4567671701620866E-6}, evalInputDelta = -6.939759211732621E-8
New Minimum: 3.554075725433895 > 3.554075309048369
F(0.19773267430000002) = LineSearchPoint{point=PointSample{avg=3.554075309048369}, derivative=-2.456766873937171E-6}, evalInputDelta = -4.857831177318417E-7
New Minimum: 3.554075309048369 > 3.5540723943510946
F(1.3841287201) = LineSearchPoint{point=PointSample{avg=3.5540723943510946}, derivative=-2.456764800363559E-6}, evalInputDelta = -3.4004803923792792E-6
New Minimum: 3.5540723943510946 > 3.554051991539054
F(9.688901040700001) = LineSearchPoint{point=PointSample{avg=3.554051991539054}, derivative=-2.456750285387289E-6}, evalInputDelta = -2.380329243312218E-5
New Minimum: 3.554051991539054 > 3.5539091752299514
F(67.8223072849) = LineSearchPoint{point=PointSample{avg=3.5539091752299514}, derivative=-2.4566486824652613E-6}, evalInputDelta = -1.6661960153552258E-4
New Minimum: 3.5539091752299514 > 3.5529096264352873
F(474.7561509943) = LineSearchPoint{point=PointSample{avg=3.5529096264352873}, derivative=-2.455937555686552E-6}, evalInputDelta = -0.0011661683961996516
New Minimum: 3.5529096264352873 > 3.5459208828879567
F(3323.2930569601003) = LineSearchPoint{point=PointSample{avg=3.5459208828879567}, derivative=-2.4509642564303625E-6}, evalInputDelta = -0.00815491194353024
New Minimum: 3.5459208828879567 > 3.497394746132673
F(23263.0513987207) = LineSearchPoint{point=PointSample{avg=3.497394746132673}, derivative=-2.416375330054306E-6}, evalInputDelta = -0.056681048698814074
New Minimum: 3.497394746132673 > 3.176484247490103
F(162841.3597910449) = LineSearchPoint{point=PointSample{avg=3.176484247490103}, derivative=-2.1850130753876255E-6}, evalInputDelta = -0.377591547341384
New Minimum: 3.176484247490103 > 1.6708934670181776
F(1139889.5185373144) = LineSearchPoint{point=PointSample{avg=1.6708934670181776}, derivative=-1.0158888501736645E-6}, evalInputDelta = -1.8831823278133093
F(7979226.6297612) = LineSearchPoint{point=PointSample{avg=18.563136511965343}, derivative=1.1453835238497548E-5}, evalInputDelta = 15.009060717133856
F(613786.6638277846) = LineSearchPoint{point=PointSample{avg=2.33941549935678}, derivative=-1.5561543362820406E-6}, evalInputDelta = -1.2146602954747068
New Minimum: 1.6708934670181776 > 1.3228256543700931
F(4296506.646794492) = LineSearchPoint{point=PointSample{avg=1.3228256543700931}, derivative=8.296025662236153E-7}, evalInputDelta = -2.231250140461394
1.3228256543700931 <= 3.554075794831487
New Minimum: 1.3228256543700931 > 0.8599118303908722
F(3211907.781680168) = LineSearchPoint{point=PointSample{avg=0.8599118303908722}, derivative=1.0357459997491842E-7}, evalInputDelta = -2.694163964440615
Right bracket at 3211907.781680168
New Minimum: 0.8599118303908722 > 0.8504843875800207
F(3081975.105851829) = LineSearchPoint{point=PointSample{avg=0.8504843875800207}, derivative=4.2044695698990946E-8}, evalInputDelta = -2.703591407251466
Right bracket at 3081975.105851829
New Minimum: 0.8504843875800207 > 0.8489222696122312
F(3030118.179491135) = LineSearchPoint{point=PointSample{avg=0.8489222696122312}, derivative=1.8268071310418995E-8}, evalInputDelta = -2.705153525219256
Right bracket at 3030118.179491135
Converged to right
Fitness changed from 3.554075794831487 to 0.8489222696122312
Iteration 1 complete. Error: 0.8489222696122312 Total: 70.3724; Orientation: 1.3888; Line Search: 66.1249
Final threshold in iteration 1: 0.8489222696122312 (> 0.0) after 70.373s (< 30.000s)

Returns

    0.8489222696122312

This training run resulted in the following configuration:

TrainingTester.java:610 executed in 0.00 seconds (0.000 gc):

    RefList<double[]> state = network.state();
    assert state != null;
    String description = state.stream().map(RefArrays::toString).reduce((a, b) -> a + "\n" + b)
        .orElse("");
    state.freeRef();
    return description;

Returns

    

And regressed input:

TrainingTester.java:622 executed in 0.00 seconds (0.000 gc):

    return RefArrays.stream(RefUtil.addRef(data)).flatMap(x -> {
      return RefArrays.stream(x);
    }).limit(1).map(x -> {
      String temp_18_0015 = x.prettyPrint();
      x.freeRef();
      return temp_18_0015;
    }).reduce((a, b) -> a + "\n" + b).orElse("");

Returns

    [
    	[ [ 1.0344380432998006, 0.9510606383379598, -0.14071462578735291 ], [ -0.9559415894667797, 1.2785709564261183, 1.3650614543711022 ], [ -0.1581999077485624, 0.0993797761082158, -0.1273481181740459 ], [ 0.4505185764342503, -0.7997328615940614, -1.1136432001390115 ], [ -0.5690640060224084, 1.2576706481839612, -1.8087770582974039 ], [ -0.10836212581002258, -0.9519502980937695, -0.33521416423569184 ], [ 0.8120862515957169, 1.6299771422586624, -0.324977706550796 ], [ 1.334893156186827, -0.017045392006378557, -0.16842856420361998 ], ... ],
    	[ [ -0.944966697502971, -0.38042411549628485, -0.5496327022609029 ], [ -1.1290493862184023, -0.4968901812954391, 0.31910885279268625 ], [ 0.8176876146011975, 0.8034812733283676, -1.3194998480872464 ], [ -1.4258256632166966, 0.3815976878761619, -1.7014625944115256 ], [ 0.3856617262590938, 0.1408086181901993, -0.5058239269814155 ], [ -0.854327559759283, -1.2539981564424685, 1.2163322516951685 ], [ -1.6502393753521363, 0.3153730790082021, -0.4851732435109817 ], [ -0.15295663010367055, 0.9994572411690172, -0.8593064374395007 ], ... ],
    	[ [ 0.5123101617297738, 1.1323624887731798, 1.4381367300090244 ], [ 0.035734790365326174, -0.2815189911955609, 0.6110988993482795 ], [ -1.5312076404137969, -0.21431253885740037, 0.65913341569384 ], [ -0.9815714598692253, 0.577132527049826, -0.2467915820467239 ], [ -0.03374147690462774, -0.3357935916715248, -0.7945610808893432 ], [ 1.9359702953699462, 0.8043859063793717, 0.9541718819161901 ], [ -0.31559138005671905, -0.5320064684944065, -1.4980568157821201 ], [ 1.3247494831670248, 0.043130143929327236, 1.3281538651365403 ], ... ],
    	[ [ 0.7267112229085726, 0.38907304464043146, 0.3596468918442746 ], [ 0.6547983364492926, 0.8033001192391112, -0.058665504507870364 ], [ -0.18342183942301898, 0.8687688782353171, -1.2519568754478494 ], [ -1.5159033208024486, 1.527070868465701, 0.04758386962813832 ], [ -1.00088923803079, 1.3685394236583588, -0.5525720990612266 ], [ 0.9897938785563339, 0.6484777706518211, -0.6203048416501884 ], [ 1.2075791750997746, -0.23812335376554733, -1.3628094299589837 ], [ 0.27078904893510314, -0.508692447746246, 1.0620810931337612 ], ... ],
    	[ [ 0.7701024889104385, -1.570193176599302, -0.3470631432355083 ], [ -0.6567479112539483, 0.8902112056228704, 1.5615617418535728 ], [ -0.8931344193475101, -0.25354950547325394, 1.869285628821152 ], [ 0.4830239914600295, -0.19112100536223733, 0.8369788608237391 ], [ 0.7862703341144645, -0.027882892329284314, 0.4965153083169782 ], [ 1.1310317595832868, 1.1066022420596198, -1.2084497535282226 ], [ -1.6474361466759002, 1.209317343721588, 0.20287867065092846 ], [ 0.41931389895949067, -1.5355363484868385, 0.8633539650226453 ], ... ],
    	[ [ -1.1493592039286624, -1.636049770162422, 0.20683136038317237 ], [ 0.27954542380292824, 0.871631347719556, -1.8513783671704263 ], [ 0.7143506639840738, 1.3340195863584219, 1.849419678645754 ], [ -1.1171354114307204, -0.571273937626402, -0.132178526213525 ], [ -0.49164724838824114, -1.559926003436677, -0.41357599088329955 ], [ -0.4728419621494499, 0.9967305289024386, -1.1381028245577611 ], [ -0.8003355578010829, 1.1055093908386782, 1.9649298252966978 ], [ -0.9919194376867446, -0.148065510795914, -0.0066440470065819435 ], ... ],
    	[ [ 0.19562505542100175, 0.29126380089349346, 1.8784522140402984 ], [ 0.6146512011604037, -0.27589938721525903, 0.3493329927555502 ], [ -1.6277085000050404, 0.7230581912816755, -0.08942674023501951 ], [ -0.056315944766304415, -1.6333804537883634, -0.37671535633755426 ], [ 0.08074391095931643, 0.9516628426107696, -0.8939437380561747 ], [ -0.80728270822485, -0.1689048037029569, 0.08719498367744294 ], [ -0.13961877600613665, 1.8664055503101435, 1.1407389311059264 ], [ -0.5386782191081424, -0.2075290252074527, 1.5595143879411781 ], ... ],
    	[ [ 0.949242217126018, 1.3970230244820654, -1.497923421809683 ], [ 0.4580337784892572, -1.3545161978586437, 0.4097007094477171 ], [ 0.02651990780069717, 0.7145413569074339, 0.7778686923986536 ], [ -0.24004552528099588, -1.734313449262022, -1.3402348607342414 ], [ 1.1852446372701324, 1.9160697347133213, -0.4543077930414203 ], [ -0.3235086400760321, 0.5821365667865969, -0.06893939963964885 ], [ 1.3224783680676726, 0.1163204400029112, 0.11630056758995617 ], [ 0.05875715774937197, 0.31181585150331703, -0.1201991362693306 ], ... ],
    	...
    ]

To produce the following output:

TrainingTester.java:633 executed in 0.09 seconds (0.000 gc):

    Result[] array = ConstantResult.batchResultArray(pop(RefUtil.addRef(data)));
    @Nullable
    Result eval = layer.eval(array);
    assert eval != null;
    TensorList tensorList = Result.getData(eval);
    String temp_18_0016 = tensorList.stream().limit(1).map(x -> {
      String temp_18_0017 = x.prettyPrint();
      x.freeRef();
      return temp_18_0017;
    }).reduce((a, b) -> a + "\n" + b).orElse("");
    tensorList.freeRef();
    return temp_18_0016;

Returns

    [
    	[ [ 0.043575752836457066, -1.2827598170813332, 0.10260819368598448 ], [ -1.124493395911848, 1.039750451080863, 2.5633603454179474 ], [ -0.014424815408726227, 0.013842597770932297, -0.19435311783721226 ], [ 0.09733457922153262, -0.12895233652436794, 0.45633382014958046 ], [ 0.33688860161358136, -0.3859148204450077, -1.5079820393638723 ], [ 0.12373597079095315, -1.7233440127613622, -0.10515257427329151 ], [ 1.530031121623373, -2.9186718150499034, -0.585339688626191 ], [ -0.7482554051303174, -0.026928081985562825, 0.13119071830429826 ], ... ],
    	[ [ -0.6682897145682997, 0.2677057888667008, -0.2587475282006665 ], [ 0.41093800754306764, -0.6002433390048905, 0.23205697654630655 ], [ -0.23085912532491476, 0.09093680583666788, -0.9259316451655065 ], [ -0.7257248825228607, 0.3846876221703035, -2.117423740541542 ], [ 0.28724133362135335, 0.06199430514141732, -0.4964448753709228 ], [ 0.7271684631449125, -2.244502286981919, 0.03900873638130624 ], [ 1.9251311158150126, -0.12214528248959139, -0.22909556818526453 ], [ -0.007853667227450055, 0.6184798387666851, -0.36003537917505446 ], ... ],
    	[ [ -0.49553829533117105, 0.8724172449260341, 0.7166349099415766 ], [ -0.06305359849766883, 0.32249566134581886, 0.9981250040139313 ], [ 1.6397118587383595, 0.33619961737820014, 1.2910626341530824 ], [ -0.20692501644697092, -0.06896974468778388, 0.1442971347058383 ], [ 0.011622836443245797, -0.428099784334994, 0.22624976435644376 ], [ 2.874121265629905, -0.7929720671716884, -0.8454325863273495 ], [ -0.02271268707391631, -0.0808624804614292, 3.0342015955490615 ], [ -1.1410939391662231, -0.029661720826335964, 0.014711706926805292 ], ... ],
    	[ [ 1.1191352832792019, 0.3436228794013635, 0.6223182700112642 ], [ 0.24279310186704958, -1.6060070931336232, 0.06954290640921144 ], [ 0.33612844146902965, 1.5382368462724705, -0.02274375750244642 ], [ 1.105271235830584, -0.17088636933422624, -0.04364286062970846 ], [ 0.6997206441591389, -0.41149351681143814, 0.47926302192195197 ], [ 0.42964294405402714, -0.13289233044604523, -0.09060749732236616 ], [ 0.004719754487037286, 0.16951849098524105, 1.4296993827310533 ], [ 0.3795675669624965, -0.04880648548675552, -0.17195649714476777 ], ... ],
    	[ [ 0.03204688718646373, 2.6463306064415772, 0.05863710270052871 ], [ -1.2205177573598498, 1.039173904433203, 1.6194760258206573 ], [ -0.6402730669404751, 0.18917081821535123, 0.4923066742385349 ], [ 0.6132652896468206, 0.04420104344634757, -0.6980796573753637 ], [ -0.007110348067898694, 0.01192172739445945, -0.51525499763705 ], [ 0.7585168142686107, -1.2245685221313773, 0.24379920050809678 ], [ 0.031099935288829504, 0.7207084999725498, -0.20834520735740486 ], [ -0.004008432654029212, 2.3775218010654307, 0.5613527845847093 ], ... ],
    	[ [ -1.1088644967938819, 0.3778331381315457, -0.15982855798839518 ], [ -0.20284400478280357, 1.6649647093630329, -0.045210944108350165 ], [ 0.33613604443800355, -0.18516880447131795, 1.631871210757573 ], [ 1.763420098224038, 0.18020817087449947, -0.16061094883647353 ], [ 0.587361175771085, -0.9985057278151798, -0.19564787845824258 ], [ 0.3370897990120905, 0.2886672105576796, -0.7308366572871158 ], [ -0.46125462991809507, -0.1789440925082995, 2.530030471243933 ], [ 0.19433411488102179, 0.21709659142983426, -0.004554490185433589 ], ... ],
    	[ [ 0.0028115789265864806, 0.033991495972759946, -0.40514719862988463 ], [ 0.5929353175081019, -0.28425481255037643, 0.3801129478125433 ], [ -2.1235903420222897, -0.34444894290220573, -0.02953483079447152 ], [ -0.049323482955350405, -0.4975130524439957, 0.10996437352880079 ], [ 0.05496265853122449, 0.5239230199626416, 0.1005174845518511 ], [ -0.03217056029994601, 0.13511582935239813, 0.1222380825662875 ], [ -0.188584003050626, 2.7104159119067903, 0.27097563846097306 ], [ 0.4762781073187163, 0.235832095565595, -2.57988892316001 ], ... ],
    	[ [ -0.446202944709663, 1.4866961170826838, 1.875935650032767 ], [ 0.37368702009822946, 1.6402867911223367, 0.3575109698850913 ], [ 0.025157894190208044, -0.6856269744919542, 0.4868248372049062 ], [ -0.017585832544804343, 0.847793785439913, 1.898198799983966 ], [ 0.016392099297825306, -0.28482900414103385, 0.23977365241359697 ], [ 0.18022016903230056, 0.43262639599804986, -0.012308740721399493 ], [ -0.2267320406800591, 0.11658748419141739, 0.1625630713974821 ], [ -0.02984001103295918, -0.37184103130641655, -0.1902750669461106 ], ... ],
    	...
    ]

Limited-Memory BFGS

Next, we apply the same optimization using L-BFGS, which is nearly ideal for purely second-order or quadratic functions.

TrainingTester.java:509 executed in 39.62 seconds (5.425 gc):

    IterativeTrainer iterativeTrainer = new IterativeTrainer(trainable.addRef());
    try {
      iterativeTrainer.setLineSearchFactory(label -> new ArmijoWolfeSearch());
      iterativeTrainer.setOrientation(new LBFGS());
      iterativeTrainer.setMonitor(TrainingTester.getMonitor(history));
      iterativeTrainer.setTimeout(30, TimeUnit.SECONDS);
      iterativeTrainer.setIterationsPerSample(100);
      iterativeTrainer.setMaxIterations(250);
      iterativeTrainer.setTerminateThreshold(0);
      return iterativeTrainer.run();
    } finally {
      iterativeTrainer.freeRef();
    }
Logging
Reset training subject: 2757839712423
Reset training subject: 2758772171171
Adding measurement 9c507f to history. Total: 0
LBFGS Accumulation History: 1 points
Constructing line search parameters: GD
Non-optimal measurement 3.554075794831487 < 3.554075794831487. Total: 1
th(0)=3.554075794831487;dx=-2.456767219532909E-6
Adding measurement f57f7 to history. Total: 1
New Minimum: 3.554075794831487 > 3.55407050189102
WOLFE (weak): th(2.154434690031884)=3.55407050189102; dx=-2.456763454029632E-6 evalInputDelta=5.292940466805618E-6
Adding measurement 13732ac4 to history. Total: 2
New Minimum: 3.55407050189102 > 3.5540652089586664
WOLFE (weak): th(4.308869380063768)=3.5540652089586664; dx=-2.4567596885309497E-6 evalInputDelta=1.0585872820545461E-5
Adding measurement 74f3d99f to history. Total: 3
New Minimum: 3.5540652089586664 > 3.554044037310374
WOLFE (weak): th(12.926608140191302)=3.554044037310374; dx=-2.456744626582175E-6 evalInputDelta=3.175752111284069E-5
Adding measurement 7920b8d6 to history. Total: 4
New Minimum: 3.554044037310374 > 3.55394876649932
WOLFE (weak): th(51.70643256076521)=3.55394876649932; dx=-2.4566768487225466E-6 evalInputDelta=1.2702833216682308E-4
Adding measurement 46a5114a to history. Total: 5
New Minimum: 3.55394876649932 > 3.553440699896123
WOLFE (weak): th(258.53216280382605)=3.553440699896123; dx=-2.4563153919489498E-6 evalInputDelta=6.350949353639557E-4
Adding measurement 3aa90af3 to history. Total: 6
New Minimum: 3.553440699896123 > 3.550266976928362
WOLFE (weak): th(1551.1929768229563)=3.550266976928362; dx=-2.454057246387296E-6 evalInputDelta=0.003808817903125039
Adding measurement 2e386018 to history. Total: 7
New Minimum: 3.550266976928362 > 3.5275021791004293
WOLFE (weak): th(10858.350837760694)=3.5275021791004293; dx=-2.4378473559825956E-6 evalInputDelta=0.026573615731057654
Adding measurement 449c7e15 to history. Total: 8
New Minimum: 3.5275021791004293 > 3.3471512807343977
WOLFE (weak): th(86866.80670208555)=3.3471512807343977; dx=-2.3086367041962617E-6 evalInputDelta=0.20692451409708923
Adding measurement 3e980dbb to history. Total: 9
New Minimum: 3.3471512807343977 > 2.094449815058231
END: th(781801.26031877)=2.094449815058231; dx=-1.3632877039585771E-6 evalInputDelta=1.4596259797732558
Fitness changed from 3.554075794831487 to 2.094449815058231
Iteration 1 complete. Error: 2.094449815058231 Total: 39.6187; Orientation: 1.6176; Line Search: 35.1926
Final threshold in iteration 1: 2.094449815058231 (> 0.0) after 39.619s (< 30.000s)

Returns

    2.094449815058231

This training run resulted in the following configuration:

TrainingTester.java:610 executed in 0.00 seconds (0.000 gc):

    RefList<double[]> state = network.state();
    assert state != null;
    String description = state.stream().map(RefArrays::toString).reduce((a, b) -> a + "\n" + b)
        .orElse("");
    state.freeRef();
    return description;

Returns

    

And regressed input:

TrainingTester.java:622 executed in 0.00 seconds (0.000 gc):

    return RefArrays.stream(RefUtil.addRef(data)).flatMap(x -> {
      return RefArrays.stream(x);
    }).limit(1).map(x -> {
      String temp_18_0015 = x.prettyPrint();
      x.freeRef();
      return temp_18_0015;
    }).reduce((a, b) -> a + "\n" + b).orElse("");

Returns

    [
    	[ [ 1.344264774945441, -0.13154753895252708, 0.005245628964374312 ], [ -0.9916004420625937, 1.2054323077186737, 1.325690394052103 ], [ -0.5691139525127251, -1.265421339372395, 0.21941943972052147 ], [ -0.8127329186758179, -0.7939951568163336, -1.0886802872732353 ], [ -0.44955614911410985, 1.3365659474863727, -1.894271310983701 ], [ 0.6071847787109818, -0.9371473794979388, -1.2588326139177448 ], [ 0.909964913364151, 1.3050025502391918, -0.3361240957428801 ], [ 1.6325103611674034, -0.32790545676975047, 0.12868536419440949 ], ... ],
    	[ [ -1.036256158586689, -0.49289188258695654, -0.587004725717368 ], [ -1.2707328025938032, -0.12820271255625887, 0.527527231648786 ], [ 1.1933662631052029, -0.8967745621242299, -1.506852396737469 ], [ -1.4185351208867936, 0.5199063106082474, -1.8072239019303362 ], [ 0.5357946706857717, 0.9653013396526506, -0.5757016168292926 ], [ -0.5647084740860518, -0.9557196065419874, 1.7978057630086297 ], [ -1.642641862818869, 0.15556844143802448, -1.0689907068182571 ], [ -0.15521477966216943, 1.560804285739631, -0.8390453381486931 ], ... ],
    	[ [ -0.11712736391920744, 1.3101710886509899, 1.4335833386591201 ], [ -0.2905439587008597, -0.17948129619568517, 0.6919024101713702 ], [ -1.6416100602815984, -0.27492380551168805, 0.4638910932958972 ], [ 0.657908125768136, 1.4607440990498177, 0.41713468547149735 ], [ -0.029481359344500184, -0.30033123198192674, -1.190367341945115 ], [ 1.6926196666528708, 1.1543187747355053, 1.255292224462348 ], [ -0.3188625312863202, 0.48007248002090624, -0.6744059307910044 ], [ 1.53788644970773, 0.008160055727311892, 1.3577192925787747 ], ... ],
    	[ [ 0.1874988750614104, 0.699912591430141, -0.13870828151619466 ], [ 1.5846612448884558, 0.536703079356791, 0.03235105420277837 ], [ -0.258049813227802, 0.4675238619427501, -1.263860710952063 ], [ -1.6614050654596193, 1.5722796653457118, 0.9115688140410729 ], [ -1.2999933299339124, 1.6352555210813589, -0.5758912824161754 ], [ 1.3297781677114018, 0.6807708228719966, -0.7506688650101244 ], [ 1.5551426704921276, -0.2691954000257784, -1.182647298591864 ], [ 0.891991072980153, -1.3955985135017197, 1.075376740066979 ], ... ],
    	[ [ 0.9021006356389875, -0.7316013163609989, -0.45163685978005336 ], [ -0.8016229783726881, 0.35433782490527127, 1.4535564075373555 ], [ -0.9368120824280056, 0.10375533758617489, 1.8623957865288436 ], [ 1.009076990341874, -0.1917732104575104, 1.4595240260654625 ], [ 0.79942556616169, 0.016549605684874694, 0.39522233582088945 ], [ 1.1258142653780572, 1.2115179425963505, -1.2140519597396218 ], [ -1.8318680017453624, -0.17769714292044547, -0.8944342853691574 ], [ 1.470480599876948, -1.5803997639179859, 1.3565145736796207 ], ... ],
    	[ [ -1.4421786671457193, -1.8615777541715866, 0.2047305197305382 ], [ 1.0070327630967508, 0.6018205802871701, -1.8755832874663323 ], [ -0.6556227795129896, 1.602605377469726, 1.8543022102692654 ], [ -0.5672204621737601, -0.5361971559613423, 0.25972457673040655 ], [ 0.49642148740720593, -1.5955964207085556, -0.3738231496972255 ], [ -0.3594347783023105, 1.2573689094127345, -1.1929337783305727 ], [ -0.7169837146491255, -0.7238735405992832, 1.8573933666487443 ], [ -1.6746098717601485, -0.15988873997502556, 0.35147293475581043 ], ... ],
    	[ [ 0.19590326049116022, 0.28884209377351455, 1.8766326961239506 ], [ 0.9896148782224325, -0.3857885379808727, 0.19994595661165676 ], [ -1.5477898867512574, 1.0858480482821566, -1.2903916628179077 ], [ -0.4656599115510719, -1.6560998461443948, -0.7887309231734771 ], [ -0.2017642046236362, 0.8984897411098778, -1.494997258521729 ], [ -1.2886243511777606, -0.7143379746674388, 0.6368647831192168 ], [ 0.20141368848916893, 1.639055743443434, 1.3153002552143969 ], [ -0.9522053210817251, -0.15742317424205238, 0.23319686380187152 ], ... ],
    	[ [ 1.004711732873953, 1.2508339410305647, -1.315450740111068 ], [ 0.8393914956657385, -1.4802714579865062, 0.610260038223152 ], [ -0.06142066025061431, 0.9115089744231918, 0.9990790951980765 ], [ -1.2045985448451995, -1.7682124059753448, -0.7583405577570275 ], [ 1.668098508175398, 1.9189859516452825, -0.48821094661299225 ], [ -1.4516977889713902, 1.0762004725723786, -0.11869768412522431 ], [ 1.3889025098941057, -0.8841196346589826, 0.09530183382262665 ], [ 0.05967933407861947, -0.26976755087947357, 0.1114494525468365 ], ... ],
    	...
    ]

To produce the following output:

TrainingTester.java:633 executed in 0.09 seconds (0.000 gc):

    Result[] array = ConstantResult.batchResultArray(pop(RefUtil.addRef(data)));
    @Nullable
    Result eval = layer.eval(array);
    assert eval != null;
    TensorList tensorList = Result.getData(eval);
    String temp_18_0016 = tensorList.stream().limit(1).map(x -> {
      String temp_18_0017 = x.prettyPrint();
      x.freeRef();
      return temp_18_0017;
    }).reduce((a, b) -> a + "\n" + b).orElse("");
    tensorList.freeRef();
    return temp_18_0016;

Returns

    [
    	[ [ 0.8125550474244345, 0.21834705081943898, -0.003882708485070708 ], [ -1.1956343375804812, 0.8146152662426138, 2.4522115107830365 ], [ 0.23322088458589796, -1.7918863697262257, 0.3507979924876409 ], [ -0.9932824745021016, -0.09194594288775589, 0.3412807027708602 ], [ 0.21677001022027376, -0.6651452486133624, -0.1094924350289605 ], [ -0.9195489089824709, -1.6893629670905377, -1.5664594566072416 ], [ 1.7576032459351814, -1.9817664619934514, -0.6061223327796749 ], [ 0.49068360384860255, -0.5443492400027574, -0.11026865502910323 ], ... ],
    	[ [ -0.8503296054069461, 0.3835302587195772, -0.3012699867516131 ], [ 0.7944268293060278, -0.1548688767679607, 0.4559526254885352 ], [ -0.9476027112052298, -1.2931039613719224, -1.4713907167193063 ], [ -0.6915055241658118, 0.5611120807109287, -0.3335420766321503 ], [ 0.4528093712375245, 1.1267196260115415, -0.5883253585422626 ], [ 0.3033496916987891, -1.5561164670596137, -1.6338905695022616 ], [ 1.898501377805742, -0.05248336870630959, -1.1328156926484343 ], [ -0.0011348988962938363, -0.7745620612730866, -0.13054655732296347 ], ... ],
    	[ [ 0.13386712060574216, 1.3064533364901838, 0.6948917155212004 ], [ 0.5332518476886837, 0.2032678675170611, 1.1540642818925724 ], [ 1.998063289158709, 0.4343965258042818, 0.8897988796099989 ], [ 0.8929953260631985, -1.6838317657491533, -0.4058642469585047 ], [ 0.010145142351834514, -0.38046250220836764, 0.9671613107991555 ], [ 1.7736319651103833, -1.5029678085357545, -1.5052579751598225 ], [ -0.026741007650276424, 0.42632966819557444, 1.2451489793179222 ], [ -1.7521764396449402, -0.005613541440783709, -0.2258101098650273 ], ... ],
    	[ [ 0.28874826759457206, 0.7681417512752859, -0.25171107388200964 ], [ -1.4616001200237854, -1.039852739140279, -0.03850748336080192 ], [ 0.4758469481342958, 0.7908176876454011, -0.14846523118916438 ], [ -0.08193568010095229, -0.47937669447399595, -1.3871958439669068 ], [ -0.22851346460621688, 0.6642386368060444, 0.5083203686678047 ], [ 1.1948116888240237, -0.18955304285606236, 0.1432617810875124 ], [ 1.0262314955555825, 0.19485557867562087, 0.7307875200508105 ], [ 1.574810290669896, 1.3944697645553439, 0.01891156556942275 ], ... ],
    	[ [ 0.29884464638842884, 1.0259926249987112, 0.14032706947412174 ], [ -1.5406561820157119, 0.3833075540137657, 0.6779857487764677 ], [ -0.7210184921419444, -0.08710405954557787, 0.3200149750450222 ], [ 1.6423187473188499, 0.04445539131552606, 0.5956009446340577 ], [ -0.07541777692749832, -0.0071304890681924485, -0.3955519708082775 ], [ 0.20482635855363218, -1.467775725232894, 0.2793896917189719 ], [ 0.9549447125333189, -0.21824155810159016, 1.5510403681125076 ], [ 1.7508324644960127, 0.5609886247656486, 1.6205901949688017 ], ... ],
    	[ [ -1.8571015686220755, 1.3430174127337022, -0.15809148218477048 ], [ -1.3332570048167987, 1.1039562569933974, 0.31661525641435057 ], [ -0.9065878467202246, -1.0752784129245554, 1.6549334773828381 ], [ 0.8101338179846413, 0.11047994420590591, 0.3450575672066179 ], [ -0.8204482985038909, -1.1443494095770346, -0.16434273052153195 ], [ 0.23573220745133455, 0.8851006600225123, -0.8703577969210076 ], [ -0.30026016487596485, -0.9107800088194504, 2.039316996948826 ], [ -1.4461643705934497, 0.23464323401197726, 0.30521921606679076 ], ... ],
    	[ [ 1.4501463283683985E-4, 0.03184356513739446, -0.1378494565799803 ], [ 1.2566847939608725, -0.4139930628855208, 0.21339288837632803 ], [ -1.8577954836893653, -0.9746004554348439, -1.8678496349859004 ], [ -0.5074065731430781, 0.017308230494978056, 0.5744051440470703 ], [ -0.1581952573817418, 0.38629263186672136, -1.1990136014111643 ], [ -0.9617464103223248, 0.8407176492359295, 1.0582584500062344 ], [ 0.2871887844673814, 1.7670524415515863, 0.7637707152561602 ], [ 1.1639651481476423, 0.17791376039896933, -0.4234455317137493 ], ... ],
    	[ [ -0.026430084498904743, 0.6367248625708862, 0.753002866670485 ], [ 0.9390215974195528, 1.995793703439195, 0.6119375696924607 ], [ -0.05878383936366887, -1.0237317925387774, 0.872979813538962 ], [ -1.3205666729138588, 1.0364521972144347, 0.8195406468406797 ], [ 1.3674356729305144, -0.016645900189796507, 0.27223731687182906 ], [ 2.3370943849803085, 1.2891629187643134, -0.024843902797957982 ], [ 0.22711702878382495, -1.464554212620702, 0.1330852262610241 ], [ -0.030314841421153295, 0.3728400468339831, 0.17948413512101552 ], ... ],
    	...
    ]

TrainingTester.java:432 executed in 0.14 seconds (0.000 gc):

    return TestUtil.compare(title + " vs Iteration", runs);
Logging
Plotting range=[0.0, -0.07113207350626365], [2.0, 0.32106995872712735]; valueStats=DoubleSummaryStatistics{count=3, sum=5.037822, min=0.848922, average=1.679274, max=2.094450}
Only 1 points for GD
Only 1 points for CjGD
Only 1 points for LBFGS

Returns

Result

TrainingTester.java:435 executed in 0.02 seconds (0.000 gc):

    return TestUtil.compareTime(title + " vs Time", runs);
Logging
Plotting range=[-1.0, -0.07113207350626365], [1.0, 0.32106995872712735]; valueStats=DoubleSummaryStatistics{count=3, sum=5.037822, min=0.848922, average=1.679274, max=2.094450}
Only 1 points for GD
Only 1 points for CjGD
Only 1 points for LBFGS

Returns

Result

Results

TrainingTester.java:255 executed in 0.00 seconds (0.000 gc):

    return grid(inputLearning, modelLearning, completeLearning);

Returns

Result

TrainingTester.java:258 executed in 0.00 seconds (0.000 gc):

    return new ComponentResult(null == inputLearning ? null : inputLearning.value,
        null == modelLearning ? null : modelLearning.value, null == completeLearning ? null : completeLearning.value);

Returns

    {"input":{ "LBFGS": { "type": "NonConverged", "value": 2.094449815058231 }, "CjGD": { "type": "NonConverged", "value": 0.8489222696122312 }, "GD": { "type": "NonConverged", "value": 2.094449815058231 } }, "model":null, "complete":null}

LayerTests.java:425 executed in 0.00 seconds (0.000 gc):

    throwException(exceptions.addRef());

Results

details | result
{"input":{ "LBFGS": { "type": "NonConverged", "value": 2.094449815058231 }, "CjGD": { "type": "NonConverged", "value": 0.8489222696122312 }, "GD": { "type": "NonConverged", "value": 2.094449815058231 } }, "model":null, "complete":null}OK
  {
    "result": "OK",
    "performance": {
      "execution_time": "151.884",
      "gc_time": "14.863"
    },
    "created_on": 1586737278290,
    "file_name": "trainingTest",
    "report": {
      "simpleName": "Double",
      "canonicalName": "com.simiacryptus.mindseye.layers.cudnn.NProductLayerTest.Double",
      "link": "https://github.com/SimiaCryptus/mindseye-cudnn/tree/59d5b3318556370acb2d83ee6ec123ce0fc6974f/src/test/java/com/simiacryptus/mindseye/layers/cudnn/NProductLayerTest.java",
      "javaDoc": ""
    },
    "training_analysis": {
      "input": {
        "LBFGS": {
          "type": "NonConverged",
          "value": 2.094449815058231
        },
        "CjGD": {
          "type": "NonConverged",
          "value": 0.8489222696122312
        },
        "GD": {
          "type": "NonConverged",
          "value": 2.094449815058231
        }
      }
    },
    "archive": "s3://code.simiacrypt.us/tests/com/simiacryptus/mindseye/layers/cudnn/NProductLayer/Double/trainingTest/202004132118",
    "id": "22567c8f-59fc-493c-ab57-4b014a0297ac",
    "report_type": "Components",
    "display_name": "Comparative Training",
    "target": {
      "simpleName": "NProductLayer",
      "canonicalName": "com.simiacryptus.mindseye.layers.cudnn.NProductLayer",
      "link": "https://github.com/SimiaCryptus/mindseye-cudnn/tree/59d5b3318556370acb2d83ee6ec123ce0fc6974f/src/main/java/com/simiacryptus/mindseye/layers/cudnn/NProductLayer.java",
      "javaDoc": ""
    }
  }