1. Test Modules
  2. Training Characteristics
    1. Input Learning
      1. Gradient Descent
      2. Conjugate Gradient Descent
      3. Limited-Memory BFGS
    2. Results
  3. Results

Subreport: Logs for com.simiacryptus.ref.lang.ReferenceCountingBase

Test Modules

Using Seed 8405804493861860352

Training Characteristics

Input Learning

In this test, we use a network to learn this target input, given its pre-evaluated output:

TrainingTester.java:332 executed in 0.06 seconds (0.000 gc):

    return RefArrays.stream(RefUtil.addRef(input_target)).flatMap(RefArrays::stream).map(x -> {
      try {
        return x.prettyPrint();
      } finally {
        x.freeRef();
      }
    }).reduce((a, b) -> a + "\n" + b).orElse("");

Returns

    [
    	[ [ 0.372, 0.488, 1.72, -1.972, 0.772, 1.024, -1.748, -1.712, ... ], [ 0.628, 1.52, 1.264, 1.632, 1.104, -1.028, -0.016, -0.304, ... ], [ -0.336, 1.912, -0.144, 0.416, 0.448, -0.108, 1.016, -0.384, ... ], [ -0.396, 0.396, 0.248, 1.768, 0.152, 0.62, -1.484, -0.112, ... ], [ -1.132, 0.912, 1.952, 0.244, -1.332, 0.704, -1.108, -0.556, ... ], [ -1.912, -1.436, -0.932, 1.54, -0.508, -1.052, -1.372, -1.328, ... ], [ -0.98, 1.38, 0.316, -0.236, -1.864, 0.092, 0.564, 0.188, ... ], [ -1.068, -1.38, -1.928, 1.52, 0.792, -1.596, -0.784, -1.42, ... ], ... ],
    	[ [ 1.104, -1.74, 1.212, 1.06, 1.716, -1.74, 1.588, -1.344, ... ], [ 1.504, 0.732, 1.224, -1.152, 1.22, -0.984, 1.424, 1.428, ... ], [ 0.136, -1.852, 1.232, 1.216, 0.252, 1.04, 0.624, -0.668, ... ], [ -0.492, -0.404, -0.616, -1.192, -1.284, -1.308, 0.932, -0.648, ... ], [ 1.088, -1.492, 0.452, 0.6, -0.54, 1.212, -1.084, 0.496, ... ], [ -0.432, 0.092, 0.488, 0.684, -1.212, 1.416, 1.708, 0.432, ... ], [ 0.084, -1.052, 0.896, -0.392, -0.792, 0.016, -0.332, 1.908, ... ], [ 1.316, 0.224, -1.524, 1.556, -1.688, 1.336, -0.888, 1.16, ... ], ... ],
    	[ [ 0.272, -0.036, -0.084, 1.98, 1.264, 0.852, -1.06, -0.72, ... ], [ -0.832, 0.132, 0.46, 1.208, -0.948, 1.232, 1.42, 1.712, ... ], [ -1.168, 0.728, 0.516, -1.556, -1.38, -0.368, 1.7, 0.052, ... ], [ 0.284, -1.984, 1.628, 1.664, 1.08, 0.296, -0.136, 0.476, ... ], [ -0.5, -1.02, -1.252, -1.376, -1.916, 1.092, -0.576, -0.044, ... ], [ -1.564, 0.208, -0.94, -0.496, 1.24, 1.28, 1.504, -0.5, ... ], [ -1.592, -0.984, 0.688, -1.244, -1.264, -1.7, -1.544, -0.156, ... ], [ -1.372, 0.828, -0.464, -0.252, 0.152, -0.768, -1.592, 0.424, ... ], ... ],
    	[ [ -1.772, 1.876, 1.156, -1.368, -1.072, 0.624, 0.732, 0.64, ... ], [ -0.428, -1.224, 1.416, -1.692, 0.66, 0.636, -1.892, -1.264, ... ], [ -1.26, -0.048, 1.06, 1.256, -1.252, -0.988, 1.276, 1.688, ... ], [ -1.256, -0.096, -0.2, -0.712, -1.772, 1.932, 0.688, 0.34, ... ], [ 1.34, -0.632, -0.828, -0.812, 1.676, 0.916, -1.744, 1.432, ... ], [ -0.932, 1.848, -1.712, 0.816, 1.376, -0.168, -1.836, 0.996, ... ], [ -1.348, 0.42, -0.74, 1.852, 1.924, 0.312, -1.776, -1.952, ... ], [ -1.176, 1.944, 1.58, -0.336, 0.92, 0.676, 0.864, -0.9, ... ], ... ],
    	[ [ -0.584, -1.804, 1.796, 1.152, -1.74, -1.08, -0.516, 1.156, ... ], [ -0.748, 1.88, 0.584, -1.22, -0.452, 0.232, 1.156, 1.564, ... ], [ 1.324, 0.532, -0.528, 0.896, -0.508, 0.8, 1.836, -1.368, ... ], [ -0.832, 1.868, 0.612, -1.564, 0.268, 1.632, 1.64, -0.168, ... ], [ 0.708, -1.848, -0.208, -1.56, 0.292, 1.888, 1.88, 1.336, ... ], [ 1.928, -0.9, 1.22, -0.396, 1.292, 0.02, -0.008, 1.96, ... ], [ -1.064, 1.856, 0.688, 0.864, 0.744, 1.228, -1.036, -1.888, ... ], [ -1.364, -1.304, -1.856, -1.276, -1.964, 1.804, 1.344, -0.228, ... ], ... ],
    	[ [ 1.58, 1.732, 1.236, 1.616, -0.448, 1.208, 0.692, 1.032, ... ], [ -1.768, 0.22, -1.792, 1.484, 1.024, 0.016, -0.308, 1.264, ... ], [ -0.74, -1.504, 0.104, 1.476, -1.516, 0.756, -0.28, 1.44, ... ], [ -0.784, 0.272, -1.744, -0.66, -1.776, -0.156, -0.7, -0.904, ... ], [ -0.688, -0.676, 0.0, -0.82, 1.396, -0.064, 0.236, -0.084, ... ], [ 0.244, 1.048, -0.252, 1.388, 1.628, 0.164, 0.116, 0.472, ... ], [ 1.064, -0.04, 0.14, -1.3, -1.008, 1.64, -1.788, -0.356, ... ], [ -0.332, -1.432, -1.14, -1.724, 1.24, 0.812, 1.752, 1.02, ... ], ... ],
    	[ [ 0.528, 1.976, 1.068, -1.132, 1.948, -0.644, -1.524, -1.648, ... ], [ 0.26, 0.124, 1.696, -1.172, -1.948, -0.7, 1.488, 0.136, ... ], [ 0.68, 1.164, 1.74, 1.488, 1.316, -0.952, -0.724, -0.348, ... ], [ 0.412, 0.996, -1.984, -0.056, 1.7, 1.908, -0.664, -0.28, ... ], [ 0.224, -0.196, -0.168, 0.44, 1.972, -2.0, 1.276, -1.384, ... ], [ -1.116, 0.464, 0.656, 0.528, -0.06, 0.228, -0.668, -1.104, ... ], [ -1.836, 1.464, -0.432, -1.812, -0.844, -1.012, 0.644, -0.968, ... ], [ -1.964, -1.576, 1.032, -0.652, 1.488, -1.66, -0.328, 0.796, ... ], ... ],
    	[ [ 0.576, -1.264, 1.48, 0.312, 0.976, -0.124, -0.196, 1.212, ... ], [ -1.316, -1.616, 1.404, -0.08, -0.06, -0.372, 0.936, -0.732, ... ], [ -1.212, 0.98, -0.556, -0.248, 1.06, 1.356, -0.684, -1.78, ... ], [ -0.984, -0.148, -1.136, -0.8, 1.72, 0.984, -1.936, -0.264, ... ], [ -1.38, -1.22, 1.736, 1.496, 0.724, -1.612, -1.488, 0.804, ... ], [ 0.236, -0.788, 0.184, 0.336, -0.92, 1.064, 1.276, -1.452, ... ], [ 1.116, 1.836, 0.388, -1.288, 0.088, -0.868, -0.592, -1.736, ... ], [ 1.692, 0.236, -1.272, -0.016, -0.996, 1.52, 1.328, 1.456, ... ], ... ],
    	...
    ]
    [
    	[ [ 1.1, 1.28, 1.108, -1.088, -1.408, 1.984, 1.108, -0.336, ... ], [ 1.34, -0.336, -0.656, -0.58, -0.844, -1.42, -1.756, 1.34, ... ], [ 1.068, 0.784, 1.508, 0.996, -1.312, 0.5, 0.668, -0.732, ... ], [ -0.44, 0.756, -0.496, -1.332, -1.164, 1.992, -1.408, -1.644, ... ], [ 1.892, -0.548, 0.528, 1.912, 0.012, 1.764, 0.456, -1.216, ... ], [ 0.072, 1.052, 0.548, -1.016, -0.636, 1.48, -1.896, 0.824, ... ], [ -0.3, -0.028, -1.128, 0.46, -1.176, -1.044, -0.552, 0.964, ... ], [ -1.932, -0.6, 0.544, -1.42, 0.804, 1.308, -1.08, -0.712, ... ], ... ],
    	[ [ -0.5, 1.716, 1.364, -0.46, 0.364, 1.832, -1.824, 1.072, ... ], [ 0.336, 0.656, -1.236, 0.964, 0.02, 0.592, 1.788, -0.944, ... ], [ 0.684, 1.888, 1.1, -0.78, 0.164, 1.28, 1.512, -1.996, ... ], [ 1.368, -0.972, 0.0, 0.756, -0.872, -0.38, -1.196, 1.704, ... ], [ 0.856, -1.212, -1.044, -1.956, -0.644, 0.14, -0.016, -1.376, ... ], [ -0.536, 1.624, -0.532, -0.568, -0.4, 0.912, -1.812, 1.556, ... ], [ -1.164, -1.856, -0.604, -1.796, -0.564, 0.268, 1.868, -0.04, ... ], [ -1.576, 0.868, 1.692, 1.86, -1.236, -1.344, -0.92, -1.156, ... ], ... ],
    	[ [ 1.408, -1.896, -1.732, 1.552, 0.744, -1.852, 0.856, 0.22, ... ], [ 0.444, 0.828, 1.488, 0.464, 1.324, -1.444, 1.372, 1.98, ... ], [ -1.444, -0.812, -0.68, -0.78, 1.612, 1.604, 1.752, -1.136, ... ], [ 0.08, -0.428, -0.904, -1.4, -1.992, -0.444, -0.028, -1.608, ... ], [ -0.276, -0.368, 1.536, 1.3, -1.708, 1.268, 0.548, 1.048, ... ], [ 1.196, 0.332, 1.124, -0.96, -0.932, -0.716, -1.708, -0.804, ... ], [ -1.248, 1.16, -1.632, -0.832, 0.368, -0.516, 1.976, -0.116, ... ], [ 1.484, -1.824, 1.6, 0.428, 1.268, -0.452, -0.748, 0.528, ... ], ... ],
    	[ [ 0.152, -1.828, -0.224, 0.46, 0.048, -1.052, 0.956, 1.768, ... ], [ -0.864, 1.456, -0.864, 0.872, 1.532, -0.836, 0.952, 1.428, ... ], [ -0.296, 0.748, -1.508, -1.884, 1.184, 0.284, -0.86, 0.128, ... ], [ -1.984, -0.464, 0.92, 0.112, 0.276, 0.92, -1.936, -1.448, ... ], [ 0.564, -0.552, 1.24, -0.02, 1.756, 1.08, -0.008, -1.104, ... ], [ -0.504, 1.948, -1.408, -0.336, 1.572, 0.704, 1.448, 1.228, ... ], [ 1.708, -1.236, -1.616, 1.616, 0.852, -0.136, 0.632, -0.112, ... ], [ -1.048, 1.132, 0.888, -0.128, 0.964, 0.448, 0.764, 1.12, ... ], ... ],
    	[ [ 0.82, -1.804, -1.752, 0.208, 0.356, -0.348, -1.568, -1.736, ... ], [ -1.724, -0.808, 1.68, 0.208, -1.284, -1.092, 0.064, 1.88, ... ], [ -1.484, -0.256, -1.528, -0.84, -1.892, -0.188, 0.052, -0.692, ... ], [ -0.516, -1.168, 1.892, -1.228, 0.352, 1.288, -1.92, 1.812, ... ], [ -1.672, -1.92, -0.92, 0.164, 0.232, 0.364, 1.14, -0.78, ... ], [ -0.42, -0.136, -0.44, -0.792, -1.488, 1.516, 1.404, 0.256, ... ], [ 1.46, 0.348, 1.224, 1.248, -1.208, -0.392, -1.22, 0.436, ... ], [ 0.28, -0.676, 0.276, -0.728, 1.668, -1.944, 1.808, -1.42, ... ], ... ],
    	[ [ -0.092, -0.98, -1.004, 0.808, -1.216, 0.456, -1.5, -1.048, ... ], [ -0.568, -0.312, -1.0, 0.128, 0.924, 0.46, -0.66, 0.752, ... ], [ 1.02, 0.972, 0.288, -0.828, -1.716, 0.356, -0.116, -0.364, ... ], [ -1.804, 0.212, 0.552, 1.836, 1.44, -0.764, 1.464, 0.156, ... ], [ -1.204, 0.224, -1.98, 1.144, -1.772, -1.988, 1.304, 0.42, ... ], [ 1.452, -1.776, -1.516, -1.868, -0.172, -0.052, -0.156, -1.908, ... ], [ 0.12, 1.408, 0.196, 0.136, -1.92, 0.772, 0.256, -0.992, ... ], [ -1.504, 1.532, -0.532, -1.668, -0.152, 1.712, 0.412, -1.7, ... ], ... ],
    	[ [ 1.148, 0.88, -0.24, -1.216, 0.092, -0.32, -1.432, -0.476, ... ], [ -0.236, -0.316, 1.524, 0.3, -1.916, -0.608, -1.804, 0.96, ... ], [ -0.896, -1.228, 1.96, -1.672, 0.076, -1.388, -1.648, -1.772, ... ], [ -1.54, 0.192, 1.392, -0.172, 0.032, -1.004, 1.056, -1.176, ... ], [ -1.856, 1.944, 0.66, 1.224, 1.948, -0.452, 1.5, -0.992, ... ], [ 0.82, 0.108, 1.912, -0.772, 0.816, 1.512, 0.84, -0.176, ... ], [ 0.468, -1.844, 1.432, -0.504, -1.224, -1.32, -0.724, 1.788, ... ], [ -1.012, 0.44, 1.404, -0.852, 1.2, 

...skipping 5571 bytes...

    0.252, 0.252, -1.328, -1.164, ... ], [ -1.276, 0.928, 0.84, -0.556, 0.784, -1.828, 0.376, 1.78, ... ], [ -1.632, -1.8, 0.864, -1.668, -0.192, 0.004, -0.888, -0.304, ... ], [ 0.708, -1.744, -0.688, 0.968, -0.688, 0.8, -1.924, 1.348, ... ], [ -1.52, 1.716, 0.512, -1.736, -1.472, 0.824, 1.816, -0.428, ... ], [ 1.0, -1.56, 1.608, 0.436, -0.244, -0.54, -0.496, -0.872, ... ], [ -0.076, -0.056, 0.9, -1.308, -0.464, -1.476, 0.476, -0.424, ... ], [ -1.884, 1.812, 0.056, -0.436, -0.308, 0.872, -1.236, -0.42, ... ], ... ],
    	[ [ -0.228, -0.204, -0.504, 1.916, 1.564, -0.516, -1.44, 1.732, ... ], [ -1.056, 1.364, 1.98, -1.032, 1.488, -0.268, -1.752, 1.172, ... ], [ 1.772, 0.652, -1.092, -0.544, 0.672, -1.84, 1.872, 1.232, ... ], [ 1.672, 1.556, -1.116, 1.612, 0.56, 0.108, 1.224, -1.124, ... ], [ 0.212, -1.62, -1.944, 0.892, 1.352, 1.808, 0.872, -1.432, ... ], [ -1.424, -0.848, 0.508, -1.108, -0.256, 0.36, -1.624, 1.868, ... ], [ -0.304, 0.292, -0.164, 0.216, -0.952, -1.128, 1.96, -1.208, ... ], [ -1.408, 1.764, 1.408, 1.872, 0.984, 1.888, 0.192, 1.792, ... ], ... ],
    	[ [ -1.344, 1.936, 1.368, -1.652, 1.604, 1.012, 0.5, 1.852, ... ], [ 0.46, 1.816, -0.092, -1.972, 1.148, -0.648, 0.3, -1.496, ... ], [ -1.78, -1.524, -0.952, -1.0, 0.0, -1.856, -1.18, -1.168, ... ], [ 0.012, 0.652, 0.492, -0.696, -0.04, 1.704, -0.312, -1.812, ... ], [ 0.2, 1.908, -0.052, -1.24, -1.768, -1.72, -1.048, -1.62, ... ], [ 0.652, 1.576, -0.956, 0.036, 0.608, 0.144, -0.852, 1.68, ... ], [ -1.632, 1.536, -1.168, 1.064, 1.504, 1.212, -0.892, -0.924, ... ], [ -0.932, -1.104, 2.0, 0.076, -1.5, -0.804, -0.504, -0.056, ... ], ... ],
    	[ [ 0.708, -1.564, 1.876, 0.412, 1.564, 0.008, 1.276, 1.808, ... ], [ 0.908, -0.524, -1.792, -0.82, 1.3, 0.308, 1.592, 0.264, ... ], [ -0.344, -1.34, -1.696, -1.648, 0.488, -0.936, -1.188, 1.456, ... ], [ 1.4, -1.272, -0.728, 0.412, -1.548, -1.88, -1.172, -1.5, ... ], [ 1.116, -0.068, -1.26, -0.028, 0.44, -0.264, -0.44, -0.652, ... ], [ 0.332, 0.084, -1.576, -0.964, -1.5, 1.792, -1.992, 0.732, ... ], [ -1.356, 0.808, -0.728, -0.076, 0.74, 0.784, 1.92, -1.564, ... ], [ -0.44, -0.512, 0.416, -0.732, -1.504, -1.472, -1.448, -1.888, ... ], ... ],
    	[ [ -1.652, 0.772, 0.484, -1.188, -0.328, -0.992, 1.636, 1.396, ... ], [ 1.912, 0.968, -1.64, 0.28, 0.196, 0.516, 0.572, 1.94, ... ], [ -0.62, 1.38, -1.328, 0.284, 0.128, -0.996, -0.06, -0.316, ... ], [ 0.492, -0.844, 1.136, -0.048, -1.98, 0.296, -0.688, -1.792, ... ], [ 1.956, -0.488, -0.128, 0.864, 0.46, -1.448, -1.248, -0.476, ... ], [ 0.108, -0.392, 0.624, 1.456, 1.06, -1.812, -0.028, 1.836, ... ], [ -0.516, 0.992, 0.74, -1.664, 0.972, 1.672, -0.62, -0.636, ... ], [ 1.148, -1.04, 0.352, -0.008, 0.392, 1.368, -1.836, -1.288, ... ], ... ],
    	[ [ -0.152, 1.796, -0.188, -1.396, 0.976, 1.836, 1.996, -0.244, ... ], [ 0.184, 1.912, -0.248, 0.664, -0.068, -1.752, -0.916, 1.78, ... ], [ -0.824, -0.644, 1.04, -0.92, -0.7, 1.876, -0.356, -1.952, ... ], [ 1.276, -0.396, 0.568, -0.808, -1.716, -1.864, 0.38, -0.16, ... ], [ -1.592, -1.376, 0.412, -1.768, 1.456, -1.784, 1.808, -1.22, ... ], [ -1.076, -1.704, -1.464, 0.86, 0.056, -0.172, 1.96, -0.016, ... ], [ 0.76, 0.98, 0.16, 0.54, 0.46, -0.112, -0.364, 0.584, ... ], [ 1.304, -0.916, 0.752, -0.352, 0.024, 0.512, 1.248, 0.072, ... ], ... ],
    	[ [ 1.128, -1.468, 0.964, -1.088, -0.756, -0.68, 0.164, -0.16, ... ], [ -1.932, -0.876, 0.264, 0.768, -0.904, -0.404, -1.032, 1.624, ... ], [ -1.424, 1.512, 0.284, 1.408, 0.672, -0.54, 0.82, 1.556, ... ], [ -0.104, 1.34, 1.62, 1.872, -0.324, 1.616, 1.988, 0.52, ... ], [ -1.26, -0.456, 0.912, 0.216, 1.208, -1.26, 1.332, -1.736, ... ], [ 1.38, -0.92, 1.7, 0.396, 0.844, -0.668, 0.712, -0.908, ... ], [ 1.744, 1.068, 0.548, 0.74, 0.404, 0.636, -1.824, 0.512, ... ], [ -0.208, -1.624, 1.52, 0.876, 0.744, 1.952, -1.02, -0.036, ... ], ... ],
    	...
    ]
    [
    	[ [ 0.04, 0.3, -1.28, 0.428, -0.64, -0.424, -1.336, -0.924, ... ], [ -0.28, -0.248, -0.728, -0.96, -1.992, -0.664, -0.792, 1.868, ... ], [ 1.92, 0.736, 1.008, -0.964, -0.096, 1.072, 1.912, -0.444, ... ], [ 1.496, 0.436, 0.412, -1.068, 1.36, -0.576, -1.964, 0.56, ... ], [ -0.112, -1.42, 0.212, 0.956, -0.204, -1.9, -0.348, 1.636, ... ], [ 0.04, -1.508, -1.548, -0.952, 0.784, 0.804, 0.284, 0.592, ... ], [ 1.768, -0.948, -1.384, 0.664, 0.652, 1.208, -1.764, 0.552, ... ], [ 1.244, 1.148, -1.808, -1.948, -1.124, -0.372, 1.524, 1.556, ... ], ... ],
    	[ [ 1.944, 1.36, 1.58, 0.628, -1.584, -1.644, 0.884, 1.0, ... ], [ -0.856, -0.536, 0.076, 0.836, -1.124, 1.468, 0.58, -1.404, ... ], [ 1.896, -0.84, 0.408, -1.964, 0.104, 1.512, 1.376, -0.604, ... ], [ 0.64, -1.716, -1.872, 1.732, -1.124, -0.144, 0.68, 0.488, ... ], [ 0.284, 0.184, -1.924, 0.324, -1.356, -1.964, 0.668, -1.708, ... ], [ 0.532, 1.34, 1.34, 1.736, 0.128, 1.372, -0.116, -1.088, ... ], [ 1.816, 1.92, 0.288, 0.564, -0.252, -1.912, 1.708, -0.776, ... ], [ -0.44, -0.856, 1.892, -0.356, 0.228, 1.536, -0.98, -0.412, ... ], ... ],
    	[ [ 1.272, -1.176, 0.32, 0.452, -1.288, -1.78, 1.032, 0.648, ... ], [ 1.352, 0.32, 1.368, -0.936, -0.556, -1.156, -0.568, -1.696, ... ], [ 0.376, 0.604, -0.396, 0.468, 0.908, -0.2, 0.772, 1.124, ... ], [ 1.932, 0.048, -0.612, 1.64, 0.112, -0.1, -0.58, -1.556, ... ], [ -0.448, -0.408, -0.94, 1.288, -1.924, 0.036, 1.156, 0.16, ... ], [ -1.964, -0.708, 0.968, 0.504, -0.86, 1.96, 1.712, -0.892, ... ], [ -1.1, 1.772, 1.752, 1.828, -0.476, 0.188, -0.188, 0.588, ... ], [ 0.712, 1.056, -0.192, 1.076, -1.224, 1.496, -1.82, 0.168, ... ], ... ],
    	[ [ -0.24, 1.224, -0.068, -1.052, 1.12, -1.356, 0.264, -1.76, ... ], [ 1.628, 1.58, 0.444, 1.152, 0.612, 1.916, 0.992, 1.992, ... ], [ -0.684, -1.424, -1.476, -0.824, -1.688, 1.26, -0.396, 0.452, ... ], [ 1.404, 0.144, -1.016, 1.164, 0.404, 0.416, 0.396, -0.84, ... ], [ -1.328, -1.388, -1.716, 1.772, 0.844, 1.228, 0.936, 1.084, ... ], [ -1.008, -0.74, 1.8, 0.8, -1.204, 0.58, 0.212, -0.296, ... ], [ 0.664, -0.836, 0.212, 0.716, 0.536, -1.856, 1.84, -0.12, ... ], [ -1.632, 1.304, -0.532, -1.464, -1.336, -1.372, -0.316, -0.712, ... ], ... ],
    	[ [ -0.528, -0.724, -0.168, 0.312, -0.948, -0.984, -0.296, 0.544, ... ], [ 0.42, 1.812, -1.584, 1.632, 0.664, 0.316, -1.612, 1.508, ... ], [ 0.66, 1.696, 1.672, 1.316, 0.444, -0.584, 0.616, 0.992, ... ], [ -0.628, 0.688, -1.376, -0.616, -0.096, 0.524, -1.72, 1.912, ... ], [ 1.884, -1.456, 0.724, 0.48, -1.744, -1.588, 0.896, 1.132, ... ], [ 0.928, 1.448, 1.292, -0.804, 0.732, -0.052, 0.44, 0.464, ... ], [ -0.58, -1.456, -1.332, -1.472, 0.992, -0.124, 1.204, 1.42, ... ], [ -1.584, -1.248, 1.136, 0.92, 0.108, 1.044, -0.7, -0.804, ... ], ... ],
    	[ [ 0.788, 1.024, 1.78, 1.404, 1.256, -0.44, 0.428, -1.232, ... ], [ 1.284, -0.256, -0.376, 1.252, 1.484, -0.972, -0.48, -1.804, ... ], [ 0.284, 1.872, -1.148, 1.212, 1.36, 1.02, -0.42, 1.216, ... ], [ 0.16, 0.22, -1.256, 0.864, -0.132, -1.072, -0.792, -0.124, ... ], [ 0.764, -1.208, 1.412, 0.848, 0.792, 0.724, -0.824, -1.572, ... ], [ -0.24, -1.228, 1.548, -1.092, 0.296, 0.856, 1.308, -0.58, ... ], [ -1.244, 0.068, -1.572, 1.824, 0.86, 0.384, -1.048, -0.884, ... ], [ -1.544, 1.384, -1.984, -1.892, 1.612, -0.036, 0.268, -1.088, ... ], ... ],
    	[ [ -1.564, 0.116, 1.224, -1.072, -1.756, 0.9, 0.764, 1.132, ... ], [ 0.176, 1.504, 1.2, -1.172, -1.908, 1.264, 0.504, -0.74, ... ], [ -0.8, 0.924, -1.584, -0.028, -0.516, -1.4, -1.06, -0.24, ... ], [ -0.252, 1.652, 0.72, 0.552, -0.62, 1.404, 0.24, -1.084, ... ], [ -0.78, -1.124, -1.688, -0.62, -1.768, -1.524, -0.636, -1.116, ... ], [ 0.348, 0.792, 0.668, -0.656, -1.828, -1.88, 0.524, 1.432, ... ], [ 1.772, 1.632, -0.112, -0.588, -0.696, 0.224, -0.76, 1.424, ... ], [ 0.308, -0.052, -1.268, 1.048, -1.508, -0.44, -1.5, -1.432, ... ], ... ],
    	[ [ -1.516, -0.352, 1.048, 1.792, -0.192, 0.976, 0.84, 0.568, ... ], [ -1.808, -1.688, -1.304, -1.768, -0.628, -1.08, 1.768, 1.648, ... ], [ -1.652, 1.528, 1.612, -0.612, -1.84, -1.264, -1.092, 0.488, ... ], [ 0.636, -1.804, -1.66, 1.496, 1.236, 1.604, -1.292, -1.388, ... ], [ 1.408, -0.148, -1.328, -1.532, 0.624, -1.932, -1.776, -1.32, ... ], [ 1.552, 0.996, -0.108, -0.408, -1.224, -0.48, -1.456, -0.164, ... ], [ 1.228, -0.656, 1.872, 1.084, 0.368, -1.52, 0.428, 1.428, ... ], [ 1.64, -0.14, 0.56, 1.24, -1.252, -1.656, 1.244, 0.804, ... ], ... ],
    	...
    ]

Gradient Descent

First, we train using the basic gradient descent method, applying weak line search conditions.

TrainingTester.java:480 executed in 46.76 seconds (5.921 gc):

    IterativeTrainer iterativeTrainer = new IterativeTrainer(trainable.addRef());
    try {
      iterativeTrainer.setLineSearchFactory(label -> new ArmijoWolfeSearch());
      iterativeTrainer.setOrientation(new GradientDescent());
      iterativeTrainer.setMonitor(TrainingTester.getMonitor(history));
      iterativeTrainer.setTimeout(30, TimeUnit.SECONDS);
      iterativeTrainer.setMaxIterations(250);
      iterativeTrainer.setTerminateThreshold(0);
      return iterativeTrainer.run();
    } finally {
      iterativeTrainer.freeRef();
    }
Logging
Reset training subject: 1548888328710
Reset training subject: 1550929629144
Constructing line search parameters: GD
th(0)=0.8532350811317408;dx=-2.666359628701323E-7
New Minimum: 0.8532350811317408 > 0.8532345069472168
WOLFE (weak): th(2.154434690031884)=0.8532345069472168; dx=-2.66413607531703E-7 evalInputDelta=5.741845240381238E-7
New Minimum: 0.8532345069472168 > 0.8532339329507661
WOLFE (weak): th(4.308869380063768)=0.8532339329507661; dx=-2.664128440372772E-7 evalInputDelta=1.1481809747415639E-6
New Minimum: 0.8532339329507661 > 0.8532316371014387
WOLFE (weak): th(12.926608140191302)=0.8532316371014387; dx=-2.6641237333092883E-7 evalInputDelta=3.444030302168777E-6
New Minimum: 0.8532316371014387 > 0.8532213056953497
WOLFE (weak): th(51.70643256076521)=0.8532213056953497; dx=-2.6641075477764104E-7 evalInputDelta=1.3775436391139095E-5
New Minimum: 0.8532213056953497 > 0.8531662059918895
WOLFE (weak): th(258.53216280382605)=0.8531662059918895; dx=-2.664021451475316E-7 evalInputDelta=6.887513985132099E-5
New Minimum: 0.8531662059918895 > 0.8528218731458891
WOLFE (weak): th(1551.1929768229563)=0.8528218731458891; dx=-2.6634833557437944E-7 evalInputDelta=4.132079858517601E-4
New Minimum: 0.8528218731458891 > 0.8503447964115418
WOLFE (weak): th(10858.350837760694)=0.8503447964115418; dx=-2.659075082728627E-7 evalInputDelta=0.0028902847201990323
New Minimum: 0.8503447964115418 > 0.8303317003498624
WOLFE (weak): th(86866.80670208555)=0.8303317003498624; dx=-2.607354484157113E-7 evalInputDelta=0.02290338078187848
New Minimum: 0.8303317003498624 > 0.6647611271400282
END: th(781801.26031877)=0.6647611271400282; dx=-2.1673950303447227E-7 evalInputDelta=0.18847395399171263
Fitness changed from 0.8532350811317408 to 0.6647611271400282
Iteration 1 complete. Error: 0.6647611271400282 Total: 46.7429; Orientation: 2.6974; Line Search: 38.1497
Final threshold in iteration 1: 0.6647611271400282 (> 0.0) after 46.745s (< 30.000s)

Returns

    0.6647611271400282

This training run resulted in the following configuration:

TrainingTester.java:610 executed in 0.00 seconds (0.000 gc):

    RefList<double[]> state = network.state();
    assert state != null;
    String description = state.stream().map(RefArrays::toString).reduce((a, b) -> a + "\n" + b)
        .orElse("");
    state.freeRef();
    return description;

Returns

    

And regressed input:

TrainingTester.java:622 executed in 0.01 seconds (0.000 gc):

    return RefArrays.stream(RefUtil.addRef(data)).flatMap(x -> {
      return RefArrays.stream(x);
    }).limit(1).map(x -> {
      String temp_18_0015 = x.prettyPrint();
      x.freeRef();
      return temp_18_0015;
    }).reduce((a, b) -> a + "\n" + b).orElse("");

Returns

    [
    	[ [ -0.908, 0.032, -1.812, -0.272, 0.66, 0.224, 0.492, -0.564, ... ], [ -0.644, 0.7615431649881349, -1.42, -1.232, -0.592, -0.512, 1.7425004660621188, 1.1225104339233998, ... ], [ -1.968, -1.708, -0.392, -0.852, -1.872, -0.016, -0.384, -1.916, ... ], [ -0.528, -1.068, 0.7755213516850208, 0.432, 0.048, 1.4543421579747673, 0.668, 1.4540728924592168, ... ], [ 0.6455668558718008, -1.94, -1.712, 1.2766469287076372, 1.2160847423450754, 0.036, -1.644, -1.152, ... ], [ -0.588, -1.84, -0.572, 1.144, -1.584, 0.4960199534986645, -1.192, 1.0262175041883372, ... ], [ -1.788, 0.296, 1.204, -0.688, 1.740071037098793, 0.372, 1.6665714898288349, -1.596, ... ], [ -0.1903795407726389, 1.68, 1.7808662971444493, 1.268, 1.772, -1.044, -1.224, -1.544, ... ], ... ],
    	[ [ 1.3354540275290885, -0.408, -1.492, -1.356, 1.5755450270145972, 0.6833967112306677, 0.016, -1.604, ... ], [ -0.52, -0.228, 0.7017014819635377, 0.22745031014220224, -1.164, 0.104, -1.124, -1.0, ... ], [ 1.691538780984228, 0.9288152135443211, -1.756, -0.84, 0.5568525985642054, 0.424, -0.42, 0.036, ... ], [ 1.796, 0.144, -1.768, 1.4941128172326483, -0.524, -1.984, 1.7849909642629564, 0.444, ... ], [ -1.656, 0.7504767885105297, 0.08043755479625991, -1.9, 0.54, -0.976, 1.296, 0.64, ... ], [ -0.784, -0.9, -0.584, -1.972, -1.924, -1.352, 1.8976141976429022, 0.476, ... ], [ -0.548, -0.196, 1.0, -0.016, 1.224, -1.328, 1.364, -0.5464505689833805, ... ], [ -0.68, 1.7451829177631781, -1.052, 1.478749760302902, 0.492, 1.5501938177486967, 0.4, -1.428, ... ], ... ],
    	[ [ 0.9812975859122248, 0.9314403422809213, 0.744, 0.68, 0.848, -0.096, -1.308, 0.632, ... ], [ -0.672, -0.108, 1.6021028315952648, 1.6745104516995022, -0.02, -0.068, 1.3041246471203918, -0.184, ... ], [ 0.452, -0.964, -2.0, -1.472, -0.26, -0.256, 0.328, -0.404, ... ], [ 0.8668881148915891, -0.752, -0.48, 0.228, 0.776, 1.3104867741479131, 0.6395649916233258, 0.676, ... ], [ -0.9143022376453612, 1.7075450425686869, 1.1137899439175118, -0.876, 0.224, 1.367784352283093, -0.09367526243638855, -0.76, ... ], [ -1.48, -1.16, 0.712, -1.024, -0.08, 1.12, -0.688, 1.43902274646892, ... ], [ 0.0038217295259326822, 1.532, -1.648, -1.28, 0.46, -0.42, -0.12, 0.488, ... ], [ -0.46, 1.6525559425542056, -0.816, 1.9116933666851643, 0.272, -1.292, -0.648, 0.112, ... ], ... ],
    	[ [ -1.392, -1.612, -0.416, 1.396, -1.664, 0.7052938618593002, 0.068, 1.67688625342063, ... ], [ -1.764, -1.792, 0.56, -1.508, 0.9901128172326484, -2.0, -1.248, 0.308, ... ], [ -1.94, 1.128, 1.1820554764920868, 0.807224693453008, 1.6699108736510175, 0.768, 0.228, 1.0840436354942793, ... ], [ 0.936, 1.2336378840825426, -2.0, -1.184, 0.512, 0.56, 0.184, -0.412, ... ], [ -1.832, -1.576, 0.916, 0.3630327221766838, 1.5495905345343965, 0.788, -1.448, 1.228, ... ], [ -1.496, -0.032, -0.704, -1.208, -0.596, 0.616, -0.684, 0.736, ... ], [ -1.68, -0.276, -1.244, 0.568, 0.7687578816918106, 0.5004312887677752, 1.6751274457151173, 0.32, ... ], [ 0.152, 0.16, 1.6783184793121717, -0.68, -1.416, -1.624, -1.916, 1.0139108736510176, ... ], ... ],
    	[ [ -0.468, 1.4656578420252326, -1.852, -0.936, 1.3962056765226065, -1.176, 0.476, 0.6405085830070016, ... ], [ 1.672, -1.084, -1.26, -0.252, 1.184, -0.62, -0.528, 0.236, ... ], [ -1.924, -0.696, -1.648, 0.052, -0.5431274368270659, -1.02, 1.112, 0.896, ... ], [ 0.39348583313562435, -0.188, 1.6829691287397144, 0.812, -1.472, 1.088, 1.6182947873174993, 1.7490009321242377, ... ], [ 1.0, 1.413182917763178, -0.352, -0.15349328124147354, 1.424340295948305, 1.432, 0.64, 1.396, ... ], [ 0.236, -1.688, -0.6, -1.724, -1.364, 0.9, -0.48, 1.6299308315937078, ... ], [ -0.5, 1.322271111987923, -1.536, -0.472, -1.684, -1.656, -0.12, -1.544, ... ], [ 1.5380355229934222, -1.224, -0.848, 1.096, -0.388, 0.6768488811773195, -0.852, 1.6976952159350531, ... ], ... ],
    	[ [ -1.932, -0.228, 0.104, -1.632, -1.096, -0.316, -1.136, -0.8, ... ], [ 1.7731255925767062, -0.14, 0.31771145871286993, 0.2149616984099678, -1.512, 0.4003839247765457, 0.9804312954338137, -1.336, ... ], [ -0.432, 1.8820455075197993, 0.832, -1.192, -1.084, 0.968, -0.392, -1.324, ... ], [ -0.412, 1.444, -1.848, 1.7288189309312072, -1.296, 1.8988644373399999, 0.92, -0.872, ... ], [ 1.5312147167036758, 1.272, 0.9777151827657945, -0.5, -0.516, -0.372, 1.3497824913676373, -1.804, ... ], [ 0.176, 0.0, -1.164, -1.82, 1.196, 1.9336478591653652, 0.512, -1.124, ... ], [ 1.208, 0.276, 1.5061501955864944, 0.132, 0.464, -0.828, 1.1550663764776052, -1.528, ... ], [ -1.632, 0.772, -0.404, 1.2572739083606357, 1.2786251126270072, 0.372, -1.776, 0.356, ... ], ... ],
    	[ [ 1.488, -0.1, -1.676, 0.736, -1.524, 1.1049809697375217, 0.58, 1.8616042253375955, ... ], [ -1.128, 0.838314751926228, -1.1025652682406066, 0.7228307874831044, 0.6558142903081348, -1.78, -0.98, -1.628, ... ], [ 0.42015830808735155, -1.408, -0.012, 0.28, 1.4671810557367158, -0.2093586462616855, 0.9863458909157431, -0.52, ... ], [ -0.424, 0.26, 0.7322356023265778, -0.624, -0.62, -0.64, -0.02, 1.348, ... ], [ 0.18, 1.36, -0.168, 0.8709753947681991, 1.8912383942552646, -1.42, 0.6228843952826902, 0.488, ... ], [ -0.852, 1.317263936055329, 0.6193829926523082, -1.344, -1.076, 0.7233967023426164, -0.668, 1.197273917248687, ... ], [ 0.9897251550711011, 0.316, 1.7412975770241736, 1.465799911778793, 0.812, -0.336, 0.088, 0.248, ... ], [ 0.384, -0.128, 1.804, 1.172, 0.612, 0.032, -1.172, -1.564, ... ], ... ],
    	[ [ 1.376, -0.392, -1.824, -1.524, -0.592, -1.056, 1.36, -1.328, ... ], [ 1.4819345378705298, -0.004, -1.876, 0.288, -0.352, -1.172, 1.8566942971428924, 0.908, ... ], [ -0.416, 0.7075213561290465, -1.584, 1.3615905345343966, 1.216, -1.712, -1.924, 1.340698021195817, ... ], [ -1.544, -1.4, -0.46, -1.72, 0.972, -1.236, 0.772, 0.344, ... ], [ -1.228, -1.488, -0.9, -0.432, 0.732, -1.584, -1.448, 0.104, ... ], [ 1.4579582287541957, -1.784, 0.304, -0.096, 0.944, -1.788, -1.956, -0.156, ... ], [ -1.696, 1.1934185156457304, -0.368, 0.296, -1.94, -1.164, 0.987396693454565, 0.22, ... ], [ -1.564, 1.076, -0.328, 0.904, 1.2246469420397141, 0.5894858331356244, 0.788, 0.7221227850939294, ... ], ... ],
    	...
    ]

To produce the following output:

TrainingTester.java:633 executed in 0.19 seconds (0.000 gc):

    Result[] array = ConstantResult.batchResultArray(pop(RefUtil.addRef(data)));
    @Nullable
    Result eval = layer.eval(array);
    assert eval != null;
    TensorList tensorList = Result.getData(eval);
    String temp_18_0016 = tensorList.stream().limit(1).map(x -> {
      String temp_18_0017 = x.prettyPrint();
      x.freeRef();
      return temp_18_0017;
    }).reduce((a, b) -> a + "\n" + b).orElse("");
    tensorList.freeRef();
    return temp_18_0016;

Returns

    [
    	[ [ 1.335453987121582, 0.7615431547164917, 0.7017014622688293, 0.22745031118392944, 1.575545072555542, 0.6833966970443726, 1.7425004243850708, 1.1225104331970215, ... ], [ 1.7960000038146973, 0.9288151860237122, 0.7755213379859924, 1.4941128492355347, 0.5568525791168213, 1.4543421268463135, 1.784990906715393, 1.4540728330612183, ... ], [ 0.6455668807029724, 0.7504767775535583, 0.0804375559091568, 1.2766469717025757, 1.2160847187042236, 0.49601995944976807, 1.8976142406463623, 1.0262174606323242, ... ], [ -0.1903795450925827, 1.7451828718185425, 1.780866265296936, 1.4787497520446777, 1.7719999551773071, 1.5501937866210938, 1.6665714979171753, -0.5464505553245544, ... ], [ 0.946338415145874, 1.8387634754180908, 1.6242057085037231, 1.7615232467651367, 0.9401046633720398, 1.921738862991333, 0.22879526019096375, 1.1199862957000732, ... ], [ 1.431511402130127, 0.3957507014274597, 1.6752620935440063, 1.576192021369934, 1.725307583808899, 1.2395586967468262, 0.9481956958770752, 1.8550463914871216, ... ], [ 0.7407578825950623, 1.233634114265442, 1.5547834634780884, 0.9884175658226013, 0.6746924519538879, 1.6815568208694458, 1.7290109395980835, 1.29689621925354, ... ], [ 1.1053985357284546, 1.2371019124984741, 0.6600772738456726, 1.839875340461731, 0.4348370432853699, 1.350797176361084, 0.8392246961593628, 1.8040000200271606, ... ], ... ],
    	[ [ 0.9812976121902466, 0.9314403533935547, 1.6021028757095337, 1.6745104789733887, 0.990112841129303, 0.7052938342094421, 1.3041245937347412, 1.6768862009048462, ... ], [ 0.9359999895095825, 1.2336379289627075, 1.1820554733276367, 0.807224690914154, 1.6699109077453613, 1.3104867935180664, 0.6395649909973145, 1.0840436220169067, ... ], [ -0.9143022298812866, 1.7075450420379639, 1.1137899160385132, 0.363032728433609, 1.5495905876159668, 1.3677843809127808, -0.09367526322603226, 1.4390227794647217, ... ], [ 0.15199999511241913, 1.6525559425354004, 1.6783185005187988, 1.9116933345794678, 0.7687578797340393, 0.5004312992095947, 1.675127387046814, 1.0139108896255493, ... ], [ 1.0741127729415894, 1.2885795831680298, 0.9977862238883972, 1.4077270030975342, 1.521240234375, 1.4313766956329346, 1.7408288717269897, 1.609479546546936, ... ], [ 1.8016815185546875, 0.7456678152084351, 1.1308507919311523, 1.4974995851516724, 1.4973849058151245, 0.8691256046295166, 1.5554004907608032, 0.5136778354644775, ... ], [ 1.147066354751587, 1.336899995803833, 1.8237980604171753, 1.5979881286621094, 1.6991610527038574, 1.5086506605148315, 1.1073766946792603, 1.514365792274475, ... ], [ 0.938763439655304, 1.9250346422195435, 1.7653886079788208, 1.048195719718933, 0.4167379140853882, 1.5388743877410889, 1.6900018453598022, 1.0360000133514404, ... ], ... ],
    	[ [ 1.7731256484985352, 1.4656578302383423, 0.3177114725112915, 0.2149616926908493, 1.3962056636810303, 0.40038391947746277, 0.980431318283081, 0.6405085921287537, ... ], [ 0.3934858441352844, 1.8820455074310303, 1.682969093322754, 1.7288188934326172, -0.5431274175643921, 1.8988643884658813, 1.6182948350906372, 1.749000906944275, ... ], [ 1.531214714050293, 1.4131829738616943, 0.9777151942253113, -0.15349328517913818, 1.4243402481079102, 1.933647871017456, 1.3497824668884277, 1.629930853843689, ... ], [ 1.53803551197052, 1.3222711086273193, 1.506150245666504, 1.2572739124298096, 1.2786251306533813, 0.6768488883972168, 1.1550663709640503, 1.697695255279541, ... ], [ 1.3260055780410767, 1.1987260580062866, 0.1354166567325592, 1.843999981880188, 1.0911710262298584, 1.4880000352859497, 0.5913493633270264, 1.6888662576675415, ... ], [ 1.2230089902877808, 1.454941749572754, 0.5488326549530029, 1.7865678071975708, 1.55921471118927, 0.0718616470694542, 1.7293412685394287, 1.804977297782898, ... ], [ 1.5152621269226074, 1.076516032218933, 1.8605122566223145, 0.8517506718635559, 0.7213038206100464, 1.7348918914794922, 1.45840322971344, 0.5630564093589783, ... ], [ 0.4445185363292694, 1.9638416767120361, 0.9630700945854187, 1.5385241508483887, 0.0830427035689354, -0.2625814974308014, 1.5366606712341309, 1.7096515893936157, ... ], ... ],
    	[ [ 1.4880000352859497, 0.8383147716522217, -1.1025652885437012, 0.7360000014305115, 0.6558142900466919, 1.1049809455871582, 1.8566943407058716, 1.8616042137145996, ... ], [ 0.4201582968235016, 0.707521378993988, 0.7322356104850769, 1.3615905046463013, 1.4671810865402222, -0.20935864746570587, 0.9863458871841431, 1.3480000495910645, ... ], [ 1.4579582214355469, 1.3600000143051147, 0.6193829774856567, 0.8709753751754761, 1.8912384510040283, 0.7233967185020447, 0.6228843927383423, 1.1972739696502686, ... ], [ 0.9897251725196838, 1.1934185028076172, 1.8040000200271606, 1.4657999277114868, 1.2246469259262085, 0.5894858241081238, 0.9873967170715332, 0.7221227884292603, ... ], [ 1.2977488040924072, -0.04906200245022774, -0.45348331332206726, 0.6464630961418152, 1.1261228322982788, 1.3998416662216187, 0.04257403686642647, 0.7850645184516907, ... ], [ 1.9597843885421753, 0.9239463806152344, 0.8886332511901855, 1.7901601791381836, 0.33341851830482483, 0.874190092086792, 1.6641583442687988, 1.8701838254928589, ... ], [ 1.5248899459838867, 1.4477033615112305, 0.7218298316001892, 1.019008994102478, -0.2184942066669464, 1.868138313293457, 1.5397744178771973, 0.7442492842674255, ... ], [ 1.0915449857711792, 0.3363739550113678, 1.33124840259552, 1.317465901374817, 0.7653412222862244, 1.6354241371154785, 1.8926706314086914, 1.34019935131073, ... ], ... ],
    	[ [ 1.1413923501968384, 1.4302711486816406, 0.3383047878742218, 1.7382985353469849, 1.085546851158142, 0.9262611269950867, 0.9831075072288513, 0.7495431900024414, ... ], [ 0.4854559004306793, 0.9983384609222412, 1.479343056678772, 1.0232247114181519, 0.6780891418457031, 0.25030848383903503, 1.7178298234939575, 1.7285122871398926, ... ], [ 1.6099382638931274, 0.20817826688289642, 1.9402393102645874, 0.35680896043777466, 0.9548407793045044, 1.9213749170303345, 1.7391037940979004, 1.815295696258545, ... ], [ 1.1991909742355347, 1.743194818496704, 1.9403640031814575, 1.760828971862793, 1.6889199018478394, 0.8808388710021973, 1.1247715950012207, 1.3234777450561523, ... ], [ 0.6862137913703918, 1.7436060905456543, 1.2119863033294678, 1.2214758396148682, 0.21107636392116547, 1.0404748916625977, 1.5936516523361206, 1.2649298906326294, ... ], [ 0.7267734408378601, 1.8128762245178223, 1.4162729978561401, 1.5805222988128662, 1.210625171661377, 1.1145341396331787, 0.9784867763519287, 1.7117606401443481, ... ], [ 1.4508980512619019, 0.23890434205532074, 1.483669638633728, 0.8257388472557068, 0.9216578602790833, 0.502132773399353, 1.7230265140533447, 1.2892838716506958, ... ], [ 1.0776715278625488, 0.59593266248703, 1.8487852811813354, 0.39429107308387756, 1.4873768091201782, 0.8916796445846558, 1.9262511730194092, 1.575487732887268, ... ], ... ],
    	[ [ 0.45385977625846863, -0.04985726252198219, 1.216478705406189, 1.5415905714035034, 1.5886132717132568, 1.7093175649642944, 1.045499563217163, 0.6217325925827026, ... ], [ 1.6492003202438354, 0.7366743683815002, -0.12991459667682648, 0.4091555178165436, 1.6512047052383423, 1.5121819972991943, 0.6742948293685913, 1.8340791463851929, ... ], [ 1.7521246671676636, 0.40441757440567017, 1.4028507471084595, -0.1797780990600586, 1.989556908607483, 1.7568188905715942, 1.9371591806411743, 1.0189417600631714, ... ], [ 0.9782947897911072, 0.7315787076950073, 0.7940354943275452, 0.6197369694709778, 1.5908170938491821, 1.1886032819747925, 1.347161054611206, 0.6204749345779419, ... ], [ 0.7178398370742798, 1.7239999771118164, 1.012613296508789, 1.6399999856948853, 1.6553667783737183, 0.30399999022483826, 1.764775276184082, 1.6496516466140747, ... ], [ 0.41245123744010925, 1.7020118236541748, 0.9749990701675415, 1.5364686250686646, 1.7680000066757202, 1.4819681644439697, 1.3180454969406128, 1.7574084997177124, ... ], [ 1.921920895576477, 1.8072857856750488, 1.653489589691162, 1.6753767728805542, 1.392330288887024, 0.5074577331542969, 1.8092066049575806, 1.6748408079147339, ... ], [ 0.952957272529602, 1.2012938261032104, 1.5928562879562378, 1.8438180685043335, 1.6562730073928833, 1.5232146978378296, 1.7224094867706299, 0.4437980651855469, ... ], ... ],
    	[ [ 0.031824275851249695, -0.12227484583854675, 1.909364938735962, 0.4892365336418152, 1.4899208545684814, 1.8167753219604492, 1.8226251602172852, 1.178544044494629, ... ], [ 0.9990090727806091, 1.686898112297058, 1.5399999618530273, 0.649030864238739, 1.6043540239334106, 1.7159663438796997, 1.6404449939727783, 1.2020118236541748, ... ], [ 0.6161483526229858, 1.8824331760406494, 0.4897824823856354, 0.5406805872917175, 1.0341365337371826, 0.8615132570266724, 0.08696170151233673, 1.6233431100845337, ... ], [ 0.5240872502326965, 1.5995450019836426, 1.6222611665725708, 1.7290109395980835, 1.4857151508331299, 0.26606544852256775, 1.47348952293396, 1.7907733917236328, ... ], [ 1.573752522468567, 0.4095294773578644, 1.0971791744232178, 1.590466856956482, 1.0711573362350464, 1.980488657951355, 1.5720000267028809, 0.615272045135498, ... ], [ 1.7662885189056396, 1.5422375202178955, 1.1785677671432495, 1.8094558715820312, 1.8115350008010864, 0.5934184789657593, 1.597068190574646, 1.2812365293502808, ... ], [ 0.4396933615207672, 0.25780242681503296, 0.05730385333299637, 1.5731356143951416, 1.9330345392227173, 1.5120474100112915, 1.5819544792175293, 1.8911710977554321, ... ], [ -0.04853598400950432, 1.8329672813415527, 1.3849809169769287, 1.0634876489639282, 1.5261027812957764, 1.6153056621551514, 1.730658769607544, 1.3421028852462769, ... ], ... ],
    	[ [ 0.9710464477539062, 1.75371515750885, 0.9397132992744446, 0.23700718581676483, 1.4964549541473389, 1.7463995218276978, 1.5223321914672852, 1.6925559043884277, ... ], [ 1.5765459537506104, 1.20158052444458, 1.0319290161132812, 0.037010908126831055, -0.14011839032173157, 1.8017724752426147, 0.6519626379013062, 1.1407479047775269, ... ], [ 1.6381438970565796, 1.2200000286102295, 1.664067268371582, 1.8791711330413818, 0.9470327496528625, 1.5014421939849854, 0.8458734750747681, 0.8522056937217712, ... ], [ 0.9308980703353882, 0.9889772534370422, 1.7995686531066895, 1.4340218305587769, 1.6454085111618042, 1.4148781299591064, 1.7964786291122437, 1.852229356765747, ... ], [ 1.4428207874298096, 0.31718918681144714, 0.39926832914352417, 1.8457388877868652, 0.6362119317054749, 1.3170782327651978, 1.7001583576202393, 1.4818634986877441, ... ], [ 1.4844648838043213, -0.4379582405090332, 1.8061028718948364, 1.5557843446731567, 0.5429753661155701, 1.5516533851623535, 1.8460454940795898, 1.3830900192260742, ... ], [ 1.684000015258789, 1.4350128173828125, 0.11998630315065384, 0.9776678085327148, 1.0683403015136719, 0.24442756175994873, 1.2771356105804443, 1.9724550247192383, ... ], [ 1.5567716360092163, 1.1035724878311157, 1.6568763256072998, 0.506779670715332, 1.6124587059020996, 1.6358180046081543, 1.7233468294143677, 0.8139071464538574, ... ], ... ],
    	...
    ]

Conjugate Gradient Descent

First, we use a conjugate gradient descent method, which converges the fastest for purely linear functions.

TrainingTester.java:452 executed in 99.36 seconds (5.188 gc):

    IterativeTrainer iterativeTrainer = new IterativeTrainer(trainable.addRef());
    try {
      iterativeTrainer.setLineSearchFactory(label -> new QuadraticSearch());
      iterativeTrainer.setOrientation(new GradientDescent());
      iterativeTrainer.setMonitor(TrainingTester.getMonitor(history));
      iterativeTrainer.setTimeout(30, TimeUnit.SECONDS);
      iterativeTrainer.setMaxIterations(250);
      iterativeTrainer.setTerminateThreshold(0);
      return iterativeTrainer.run();
    } finally {
      iterativeTrainer.freeRef();
    }
Logging
Reset training subject: 1596194143486
Reset training subject: 1597959915083
Constructing line search parameters: GD
F(0.0) = LineSearchPoint{point=PointSample{avg=0.8532350811317408}, derivative=-2.666359628701323E-7}
F(1.0E-10) = LineSearchPoint{point=PointSample{avg=0.8532350811317408}, derivative=-2.666359624947417E-7}, evalInputDelta = 0.0
F(7.000000000000001E-10) = LineSearchPoint{point=PointSample{avg=0.8532350811317408}, derivative=-2.666359624947417E-7}, evalInputDelta = 0.0
F(4.900000000000001E-9) = LineSearchPoint{point=PointSample{avg=0.8532350811317408}, derivative=-2.666359624947417E-7}, evalInputDelta = 0.0
New Minimum: 0.8532350811317408 > 0.8532350811317407
F(3.430000000000001E-8) = LineSearchPoint{point=PointSample{avg=0.8532350811317407}, derivative=-2.666359624947417E-7}, evalInputDelta = -1.1102230246251565E-16
F(2.4010000000000004E-7) = LineSearchPoint{point=PointSample{avg=0.8532350811317407}, derivative=-2.666359624947417E-7}, evalInputDelta = -1.1102230246251565E-16
New Minimum: 0.8532350811317407 > 0.8532350811317404
F(1.6807000000000003E-6) = LineSearchPoint{point=PointSample{avg=0.8532350811317404}, derivative=-2.666359624947417E-7}, evalInputDelta = -4.440892098500626E-16
New Minimum: 0.8532350811317404 > 0.8532350811317375
F(1.1764900000000001E-5) = LineSearchPoint{point=PointSample{avg=0.8532350811317375}, derivative=-2.666359624947417E-7}, evalInputDelta = -3.3306690738754696E-15
New Minimum: 0.8532350811317375 > 0.8532350811317169
F(8.235430000000001E-5) = LineSearchPoint{point=PointSample{avg=0.8532350811317169}, derivative=-2.666359624947417E-7}, evalInputDelta = -2.398081733190338E-14
New Minimum: 0.8532350811317169 > 0.8532350810761742
F(5.764801000000001E-4) = LineSearchPoint{point=PointSample{avg=0.8532350810761742}, derivative=-2.6663595809670613E-7}, evalInputDelta = -5.5566662382489085E-11
New Minimum: 0.8532350810761742 > 0.8532350802769224
F(0.004035360700000001) = LineSearchPoint{point=PointSample{avg=0.8532350802769224}, derivative=-2.666340791119983E-7}, evalInputDelta = -8.548184382561885E-10
New Minimum: 0.8532350802769224 > 0.8532350735504727
F(0.028247524900000005) = LineSearchPoint{point=PointSample{avg=0.8532350735504727}, derivative=-2.666215527680048E-7}, evalInputDelta = -7.581268168799227E-9
New Minimum: 0.8532350735504727 > 0.8532350281669355
F(0.19773267430000002) = LineSearchPoint{point=PointSample{avg=0.8532350281669355}, derivative=-2.6653952088817446E-7}, evalInputDelta = -5.2964805319355435E-8
New Minimum: 0.8532350281669355 > 0.853234712128258
F(1.3841287201) = LineSearchPoint{point=PointSample{avg=0.853234712128258}, derivative=-2.664154480065881E-7}, evalInputDelta = -3.6900348288071427E-7
New Minimum: 0.853234712128258 > 0.8532324996325269
F(9.688901040700001) = LineSearchPoint{point=PointSample{avg=0.8532324996325269}, derivative=-2.6641251258820903E-7}, evalInputDelta = -2.5814992139494564E-6
New Minimum: 0.8532324996325269 > 0.8532170122641546
F(67.8223072849) = LineSearchPoint{point=PointSample{avg=0.8532170122641546}, derivative=-2.6641008380484133E-7}, evalInputDelta = -1.8068867586262805E-5
New Minimum: 0.8532170122641546 > 0.8531086044510987
F(474.7561509943) = LineSearchPoint{point=PointSample{avg=0.8531086044510987}, derivative=-2.663931444377176E-7}, evalInputDelta = -1.2647668064214024E-4
New Minimum: 0.8531086044510987 > 0.8523499425925063
F(3323.2930569601003) = LineSearchPoint{point=PointSample{avg=0.8523499425925063}, derivative=-2.6627456825166896E-7}, evalInputDelta = -8.851385392345534E-4
New Minimum: 0.8523499425925063 > 0.8470517186840029
F(23263.0513987207) = LineSearchPoint{point=PointSample{avg=0.8470517186840029}, derivative=-2.650427112433661E-7}, evalInputDelta = -0.006183362447737983
New Minimum: 0.8470517186840029 > 0.8107162546278615
F(162841.3597910449) = LineSearchPoint{point=PointSample{avg=0.8107162546278615}, derivative=-2.5564149934386453E-7}, evalInputDelta = -0.04251882650387939
New Minimum: 0.8107162546278615 > 0.5908652916729252
F(1139889.5185373144) = LineSearchPoint{point=PointSample{avg=0.5908652916729252}, derivative=-1.962118100558294E-7}, evalInputDelta = -0.2623697894588156
New Minimum: 0.5908652916729252 > 0.17311575733817255
F(7979226.6297612) = LineSearchPoint{point=PointSample{avg=0.17311575733817255}, derivative=3.831829080768692E-8}, evalInputDelta = -0.6801193237935683
0.17311575733817255 <= 0.8532350811317408
New Minimum: 0.17311575733817255 > 0.14650766014280134
F(6976616.1636594515) = LineSearchPoint{point=PointSample{avg=0.14650766014280134}, derivative=1.4502834489079525E-8}, evalInputDelta = -0.7067274209889395
Right bracket at 6976616.1636594515
New Minimum: 0.14650766014280134 > 0.14289437403491176
F(6616720.231593871) = LineSearchPoint{point=PointSample{avg=0.14289437403491176}, derivative=5.533164904858819E-9}, evalInputDelta = -0.7103407070968291
Right bracket at 6616720.231593871
New Minimum: 0.14289437403491176 > 0.14237980476574585
F(6482203.122801898) = LineSearchPoint{point=PointSample{avg=0.14237980476574585}, derivative=2.1109762154313593E-9}, evalInputDelta = -0.710855276365995
Right bracket at 6482203.122801898
Converged to right
Fitness changed from 0.8532350811317408 to 0.14237980476574585
Iteration 1 complete. Error: 0.14237980476574585 Total: 99.3542; Orientation: 2.2707; Line Search: 91.2363
Final threshold in iteration 1: 0.14237980476574585 (> 0.0) after 99.355s (< 30.000s)

Returns

    0.14237980476574585

This training run resulted in the following configuration:

TrainingTester.java:610 executed in 0.00 seconds (0.000 gc):

    RefList<double[]> state = network.state();
    assert state != null;
    String description = state.stream().map(RefArrays::toString).reduce((a, b) -> a + "\n" + b)
        .orElse("");
    state.freeRef();
    return description;

Returns

    

And regressed input:

TrainingTester.java:622 executed in 0.01 seconds (0.000 gc):

    return RefArrays.stream(RefUtil.addRef(data)).flatMap(x -> {
      return RefArrays.stream(x);
    }).limit(1).map(x -> {
      String temp_18_0015 = x.prettyPrint();
      x.freeRef();
      return temp_18_0015;
    }).reduce((a, b) -> a + "\n" + b).orElse("");

Returns

    [
    	[ [ -0.908, 0.032, -1.812, -0.272, 0.66, 0.224, 0.492, -0.564, ... ], [ -0.644, 1.531097378837083, -1.42, -1.232, -0.592, -0.512, 1.5857394201000714, 1.4324697151518646, ... ], [ -1.968, -1.708, -0.392, -0.852, -1.872, -0.016, -0.384, -1.916, ... ], [ -0.528, -1.068, 1.2386789977920354, 0.432, 0.048, 1.033937527348587, 0.668, -0.13491417211077406, ... ], [ 1.0944735508235923, -1.94, -1.712, 1.543853226858301, -0.5332260492704608, 0.036, -1.644, -1.152, ... ], [ -0.588, -1.84, -0.572, 1.144, -1.584, 1.4294607374918216, -1.192, 0.48824206478834653, ... ], [ -1.788, 0.296, 1.204, -0.688, 0.778128221183058, 0.372, 0.5478675307419532, -1.596, ... ], [ 1.3380406816595984, 1.68, 0.8830529720469324, 1.268, 1.772, -1.044, -1.224, -1.544, ... ], ... ],
    	[ [ 1.5064660084448422, -0.408, -1.492, -1.356, 1.718055040813839, 1.0289836457729709, 0.016, -1.604, ... ], [ -0.52, -0.228, 1.7348993452826849, 1.6525507397619186, -1.164, 0.104, -1.124, -1.0, ... ], [ 0.11324005129202819, 1.9263854883556961, -1.756, -0.84, 0.44640729777103955, 0.424, -0.42, 0.036, ... ], [ 1.796, 0.144, -1.768, 1.7720074502611034, -0.524, -1.984, 1.0047485451483493, 0.444, ... ], [ -1.656, 0.9143633586547382, 1.9793839978038148, -1.9, 0.54, -0.976, 1.296, 0.64, ... ], [ -0.784, -0.9, -0.584, -1.972, -1.924, -1.352, 1.7052255631730824, 0.476, ... ], [ -0.548, -0.196, 1.0, -0.016, 1.224, -1.328, 1.364, 1.943912534170658, ... ], [ -0.68, 1.37465679439696, -1.052, 1.5571302346793763, 0.492, 1.332865929648896, 0.4, -1.428, ... ], ... ],
    	[ [ 0.2616218145171726, 1.8898204447101254, 0.744, 0.68, 0.848, -0.096, -1.308, 0.632, ... ], [ -0.672, -0.108, 1.4132770078210752, 1.9844698625400994, -0.02, -0.068, 1.4216954072896524, -0.184, ... ], [ 0.452, -0.964, -2.0, -1.472, -0.26, -0.256, 0.328, -0.404, ... ], [ 0.2754713899390392, -0.752, -0.48, 0.228, 0.776, 1.9410938010947663, 1.715515870423307, 0.676, ... ], [ 1.3729840581762318, 1.8500551697785446, -0.8564114168813228, -0.876, 0.224, 1.278715551129586, 1.5273765475427112, -0.76, ... ], [ -1.48, -1.16, 0.712, -1.024, -0.08, 1.12, -0.688, 1.4318972298812198, ... ], [ -1.193262703937423, 1.532, -1.648, -1.28, 0.46, -0.42, -0.12, 0.488, ... ], [ -0.46, 1.9482643050304804, -0.816, 1.8511266339076478, 0.272, -1.292, -0.648, 0.112, ... ], ... ],
    	[ [ -1.392, -1.612, -0.416, 1.396, -1.664, 1.2397064905636608, 0.068, 1.7125137325681656, ... ], [ -1.764, -1.792, 0.56, -1.508, 1.2680074502611034, -2.0, -1.248, 0.308, ... ], [ -1.94, 1.128, 1.6345248849304093, 1.6765359866279468, 1.0713687217254058, 0.768, 0.228, 1.6968368357842132, ... ], [ 0.936, 0.7206016983125331, -2.0, -1.184, 0.512, 0.56, 0.184, -0.412, ... ], [ -1.832, -1.576, 0.916, 0.8226275899911011, 1.6778496214806902, 0.788, -1.448, 1.228, ... ], [ -1.496, -0.032, -0.704, -1.208, -0.596, 0.616, -0.684, 0.736, ... ], [ -1.68, -0.276, -1.244, 0.568, 1.9409029967191191, 0.6785688424702399, 0.8521319463136097, 0.32, ... ], [ 0.152, 0.16, 1.5785614566914892, -0.68, -1.416, -1.624, -1.916, 0.41536872172540595, ... ], ... ],
    	[ [ -0.468, 1.886062472651413, -1.852, -0.936, 1.0185541261833266, -1.176, 0.476, 1.5775121452927554, ... ], [ 1.672, -1.084, -1.26, -0.252, 1.184, -0.62, -0.528, 0.236, ... ], [ -1.924, -0.696, -1.648, 0.052, 0.2798681273805076, -1.02, 1.112, 0.896, ... ], [ 1.3376148872005063, -0.188, 0.5963299798680082, 0.812, -1.472, 1.088, 1.8391852754932154, 1.4354788402001426, ... ], [ 1.0, 1.04265679439696, -0.352, 1.4105544648924702, 1.630979865371831, 1.432, 0.64, 1.396, ... ], [ 0.236, -1.688, -0.6, -1.724, -1.364, 0.9, -0.48, 1.964829496064286, ... ], [ -0.5, 1.8638092324714115, -1.536, -0.472, -1.684, -1.656, -0.12, -1.544, ... ], [ 1.0570641474385878, -1.224, -0.848, 1.096, -0.388, 1.820492029088116, -0.852, 1.0100841899491098, ... ], ... ],
    	[ [ -1.932, -0.228, 0.104, -1.632, -1.096, -0.316, -1.136, -0.8, ... ], [ 1.5771743580309714, -0.14, 1.8176297140285955, 1.6364994793492194, -1.512, 1.219816645885456, 1.158568897740828, -1.336, ... ], [ -0.432, 1.867794580666851, 0.832, -1.192, -1.084, 0.968, -0.392, -1.324, ... ], [ -0.412, 1.444, -1.848, 1.4723007570386197, -1.296, 1.628095328493706, 0.92, -0.872, ... ], [ 1.933805617882036, 1.272, 1.2235450379821073, -0.5, -0.516, -0.372, 1.8877578983645948, -1.804, ... ], [ 0.176, 0.0, -1.164, -1.82, 1.196, 1.8873320532407964, 0.512, -1.124, ... ], [ 1.208, 0.276, 0.6760292044058591, 0.132, 0.464, -0.828, 1.760734020182345, -1.528, ... ], [ -1.632, 0.772, -0.404, 0.8582457530718395, 1.2394348227838414, 0.372, -1.776, 0.356, ... ], ... ],
    	[ [ 1.488, -0.1, -1.676, 0.736, -1.524, -0.14198197098579612, 0.58, 1.2024952312742303, ... ], [ -1.128, 1.9926461051026836, 1.7369472561210348, 0.3059889351495212, 1.9669067218496707, -1.78, -0.98, -1.628, ... ], [ 0.6838018927514844, -1.408, -0.012, 0.28, 1.723699132420204, 1.9389803330456798, -0.6881470750037837, -0.52, ... ], [ -0.424, 0.26, 1.7547451955739999, -0.624, -0.62, -0.64, -0.02, 1.348, ... ], [ 0.18, 1.36, -0.168, 1.505145135201583, 1.973181679327369, -1.42, 1.2855561028325861, 0.488, ... ], [ -0.852, 0.4515154211729874, 1.7523378056853136, -1.344, -1.076, 1.0689835720788534, -0.668, 0.7982458267659569, ... ], [ 1.7022753698809594, 0.316, 1.021621740823055, -0.03768112182952965, 0.812, -0.336, 0.088, 0.248, ... ], [ 0.384, -0.128, 1.804, 1.172, 0.612, 0.032, -1.172, -1.564, ... ], ... ],
    	[ [ 1.376, -0.392, -1.824, -1.524, -0.592, -1.056, 1.36, -1.328, ... ], [ 0.5627446726295628, -0.004, -1.876, 0.288, -0.352, -1.172, 1.4826054602901433, 0.908, ... ], [ -0.416, 1.170679034639094, -1.584, 1.4898496214806902, 1.216, -1.712, -1.924, -0.2874792157563446, ... ], [ -1.544, -1.4, -0.46, -1.72, 0.972, -1.236, 0.772, 0.344, ... ], [ -1.228, -1.488, -0.9, -0.432, 0.732, -1.584, -1.448, 0.104, ... ], [ 0.21812084461607206, -1.784, 0.304, -0.096, 0.944, -1.788, -1.956, -0.156, ... ], [ -1.696, 1.8454019531239014, -0.368, 0.296, -1.94, -1.164, 1.332983498384736, 0.22, ... ], [ -1.564, 1.076, -0.328, 0.904, 1.4918533373994771, 1.5336148872005062, 0.788, 1.4667377453128965, ... ], ... ],
    	...
    ]

To produce the following output:

TrainingTester.java:633 executed in 0.29 seconds (0.000 gc):

    Result[] array = ConstantResult.batchResultArray(pop(RefUtil.addRef(data)));
    @Nullable
    Result eval = layer.eval(array);
    assert eval != null;
    TensorList tensorList = Result.getData(eval);
    String temp_18_0016 = tensorList.stream().limit(1).map(x -> {
      String temp_18_0017 = x.prettyPrint();
      x.freeRef();
      return temp_18_0017;
    }).reduce((a, b) -> a + "\n" + b).orElse("");
    tensorList.freeRef();
    return temp_18_0016;

Returns

    [
    	[ [ 1.506466031074524, 1.531097412109375, 1.7348994016647339, 1.6525506973266602, 1.718055009841919, 1.0289835929870605, 1.5857393741607666, 1.4324697256088257, ... ], [ 1.7960000038146973, 1.926385521888733, 1.238679051399231, 1.7720074653625488, 0.446407288312912, 1.0339375734329224, 1.0047485828399658, 0.4440000057220459, ... ], [ 1.0944736003875732, 0.9143633842468262, 1.979383945465088, 1.5438532829284668, 0.5400000214576721, 1.4294607639312744, 1.7052255868911743, 0.6399999856948853, ... ], [ 1.3380407094955444, 1.6799999475479126, 1.2039999961853027, 1.557130217552185, 1.7719999551773071, 1.3328659534454346, 1.3639999628067017, 1.9439125061035156, ... ], [ 1.780022144317627, 1.6679999828338623, 1.2465540170669556, 1.5976366996765137, 0.7519999742507935, 1.8469210863113403, 0.29292479157447815, 1.9073543548583984, ... ], [ 1.4279485940933228, 0.16060912609100342, 1.4365578889846802, 1.9859083890914917, 1.5640000104904175, 0.594700813293457, 0.10382364690303802, 1.5272732973098755, ... ], [ 1.9129029512405396, 1.9746863842010498, 1.7792366743087769, 1.953923225402832, 0.9276477694511414, 1.6637431383132935, 1.882209300994873, 1.7992441654205322, ... ], [ 0.823941171169281, 1.3617981672286987, 1.4189432859420776, 1.7223045825958252, 1.7388041019439697, 0.7878825068473816, 1.70853590965271, 1.8040000200271606, ... ], ... ],
    	[ [ 0.2616218030452728, 1.8898204565048218, 1.41327702999115, 1.9844698905944824, 1.2680073976516724, 1.2397065162658691, 1.421695351600647, 1.7125136852264404, ... ], [ 0.9359999895095825, 1.128000020980835, 1.634524941444397, 1.676535964012146, 1.07136869430542, 1.9410938024520874, 1.7155158519744873, 1.6968368291854858, ... ], [ 1.3729840517044067, 1.85005521774292, 0.9160000085830688, 0.822627604007721, 1.6778496503829956, 1.2787156105041504, 1.5273765325546265, 1.4318972826004028, ... ], [ 0.15199999511241913, 1.9482643604278564, 1.5785614252090454, 1.8511266708374023, 1.9409029483795166, 0.6785688400268555, 0.8521319627761841, 0.4880000054836273, ... ], [ 1.352007508277893, 1.2636404037475586, 0.5440000295639038, 1.4932329654693604, 0.9761394262313843, 1.4240000247955322, 1.95103120803833, 1.2599999904632568, ... ], [ 1.901438593864441, 1.6327928304672241, 1.6474497318267822, 1.6542606353759766, 2.003295660018921, 0.673174262046814, 1.184000015258789, 1.8675233125686646, ... ], [ 1.7527340650558472, 0.6320000290870667, 1.4559999704360962, 1.7583119869232178, 1.1360000371932983, 0.5217685103416443, 0.527999997138977, 0.7733135223388672, ... ], [ 0.4959999918937683, 1.7575852870941162, 1.0172107219696045, 0.671999990940094, 0.6554422378540039, 1.7348257303237915, 1.2640000581741333, 1.0360000133514404, ... ], ... ],
    	[ [ 1.6720000505447388, 1.886062502861023, 1.8176296949386597, 1.6364995241165161, 1.184000015258789, 1.2198166847229004, 1.1585688591003418, 1.5775121450424194, ... ], [ 1.337614893913269, 1.8677946329116821, 0.8320000171661377, 1.4723007678985596, 0.27986812591552734, 1.628095269203186, 1.8391852378845215, 1.4354788064956665, ... ], [ 1.9338055849075317, 1.2719999551773071, 1.2235450744628906, 1.4105544090270996, 1.6309798955917358, 1.8873320817947388, 1.8877578973770142, 1.964829444885254, ... ], [ 1.2079999446868896, 1.8638092279434204, 0.6760292053222656, 1.0959999561309814, 1.239434838294983, 1.8204920291900635, 1.760733962059021, 1.0100841522216797, ... ], [ 0.656000018119812, 1.5977541208267212, 1.4144443273544312, 1.843999981880188, 0.8809687495231628, 1.4880000352859497, 1.5782314538955688, 1.1799999475479126, ... ], [ 2.003251552581787, 1.9430385828018188, 0.164000004529953, 1.921952247619629, 1.9618055820465088, 0.7416588664054871, 1.6224586963653564, 1.8121027946472168, ... ], [ 1.2765576839447021, 0.21199999749660492, 1.8079999685287476, 0.6166091561317444, 1.7224369049072266, 0.25999999046325684, 1.0399999618530273, 0.7020036578178406, ... ], [ 1.8482424020767212, 1.7001980543136597, 0.314649373292923, 1.0611155033111572, 1.0093580484390259, 0.38940194249153137, 1.0164989233016968, 0.40924736857414246, ... ], ... ],
    	[ [ 1.4880000352859497, 1.9926460981369019, 1.7369472980499268, 0.7360000014305115, 1.9669066667556763, -0.14198197424411774, 1.4826054573059082, 1.2024952173233032, ... ], [ 0.6838018894195557, 1.1706790924072266, 1.7547452449798584, 1.48984956741333, 1.7236990928649902, 1.9389803409576416, 0.7720000147819519, 1.3480000495910645, ... ], [ 0.21812084317207336, 1.3600000143051147, 1.7523378133773804, 1.5051451921463013, 1.9731817245483398, 1.068983554840088, 1.2855560779571533, 0.7982458472251892, ... ], [ 1.7022753953933716, 1.8454020023345947, 1.8040000200271606, 1.1720000505447388, 1.4918533563613892, 1.5336148738861084, 1.3329834938049316, 1.4667377471923828, ... ], [ 1.6896514892578125, 1.6931232213974, 1.577284812927246, 1.5977176427841187, 1.8707377910614014, 1.2879999876022339, 1.898767352104187, 2.0177764892578125, ... ], [ 1.8707154989242554, -0.15556727349758148, 1.9432075023651123, 1.4267596006393433, 0.9854020476341248, 1.910950779914856, 1.9278018474578857, 1.25600004196167, ... ], [ 0.41200000047683716, 1.8538570404052734, 0.6185100674629211, 1.7992515563964844, 1.6590757369995117, 1.812000036239624, 1.3359999656677246, 0.9793908596038818, ... ], [ 1.2340550422668457, 0.689086377620697, 1.87991201877594, 1.3281540870666504, 0.6584587693214417, 0.7080000042915344, 1.8392293453216553, 0.24799999594688416, ... ], ... ],
    	[ [ 0.2280000001192093, 1.9718093872070312, 1.0259158611297607, 1.5119999647140503, 1.003999948501587, 1.0010789632797241, -0.7733288407325745, 1.5190974473953247, ... ], [ 0.2919999957084656, 1.8320223093032837, 0.7454162836074829, 1.8925360441207886, 1.2766313552856445, 0.024000000208616257, 1.614510178565979, 1.4114274978637695, ... ], [ 1.128000020980835, 1.4052627086639404, 1.7086604833602905, -0.3199999928474426, 1.0047192573547363, 1.960565209388733, 1.656000018119812, 1.7226642370224, ... ], [ 1.9224294424057007, 1.2640000581741333, 1.8263559341430664, 1.9710313081741333, 1.8706202507019043, 1.557761549949646, 1.5095486640930176, 1.173842191696167, ... ], [ 1.4023267030715942, 0.4574529826641083, 1.999354362487793, 1.6988844871520996, 1.2834644317626953, 1.831405758857727, 1.4520000219345093, 1.9133505821228027, ... ], [ 0.5720000267028809, 1.381783366203308, 1.330767035484314, 1.7301579713821411, 1.171434998512268, 1.1038458347320557, 1.6090937852859497, 1.9433395862579346, ... ], [ 1.3262016773223877, 1.8350168466567993, 1.7437505722045898, 0.7509210109710693, 1.3420624732971191, 1.7134681940078735, 1.2760000228881836, 1.3569761514663696, ... ], [ 0.7107081413269043, 0.47600001096725464, 1.4461944103240967, 1.8692702054977417, 1.2680000066757202, 1.618480920791626, 1.534348487854004, 1.8925726413726807, ... ], ... ],
    	[ [ 1.7507011890411377, 1.628198504447937, 0.9639999866485596, 1.6698495149612427, 1.7097467184066772, 1.9230825901031494, 1.2022606134414673, 0.1120000034570694, ... ], [ -0.7279999852180481, -0.3199999928474426, 1.722715973854065, 1.6133654117584229, 1.5870753526687622, 1.4559999704360962, 0.8951854109764099, 1.9659008979797363, ... ], [ 1.8696955442428589, 1.3699231147766113, 1.9194495677947998, 1.6300995349884033, 1.971743106842041, 1.5003007650375366, 1.8872807025909424, 1.5070385932922363, ... ], [ 1.1991853713989258, 1.0201616287231445, 0.3160000145435333, 1.1719634532928467, 1.9613431692123413, 0.843016505241394, 0.9279999732971191, 1.4114055633544922, ... ], [ 1.0812404155731201, 1.7239999771118164, 1.1337467432022095, 1.6399999856948853, 0.6007925271987915, 0.30399999022483826, 1.2239999771118164, 1.2879999876022339, ... ], [ 1.5240296125411987, 1.541688084602356, 1.2885212898254395, 1.3919999599456787, 1.7680000066757202, 0.708851158618927, 1.303794503211975, 1.9426716566085815, ... ], [ 1.790099024772644, 1.2479338645935059, 1.343530297279358, 1.0875227451324463, 1.1322495937347412, 0.13199999928474426, 1.7280000448226929, 1.7247192859649658, ... ], [ 0.6240000128746033, 1.7357065677642822, 0.328000009059906, 1.9008220434188843, 1.5707670450210571, 1.9258054494857788, 1.7000000476837158, -0.4326387345790863, ... ], ... ],
    	[ [ 1.809637188911438, 0.5902753472328186, 1.4818347692489624, 1.1982239484786987, 1.358099102973938, 1.1679999828338623, 1.7834349870681763, 1.6345762014389038, ... ], [ 1.7792515754699707, 1.6119999885559082, 1.5399999618530273, 1.7356699705123901, 1.0236254930496216, 1.5698935985565186, 1.5640000104904175, 1.2000000476837158, ... ], [ 0.41307151317596436, 1.4335265159606934, 1.0277578830718994, 0.9539597034454346, 0.9913834929466248, 0.7639999985694885, 1.5084995031356812, 1.0880000591278076, ... ], [ 1.7496737241744995, 1.742055058479309, 1.6970789432525635, 1.882209300994873, 1.731545090675354, 1.1852552890777588, 1.1635301113128662, 1.5485063791275024, ... ], [ 1.440000057220459, 1.9664517641067505, 1.9807415008544922, 1.2876330614089966, 1.6483230590820312, 1.9840513467788696, 1.5720000267028809, 0.8432881236076355, ... ], [ 1.3680000305175781, 1.9377028942108154, 1.3139524459838867, 1.472000002861023, 1.4873247146606445, 1.2454018592834473, 1.5756916999816895, 1.990224003791809, ... ], [ 0.37912657856941223, 1.729218602180481, 1.0584369897842407, 1.8439046144485474, 1.8320000171661377, 1.4839999675750732, 1.596205472946167, 1.6809687614440918, ... ], [ 0.5891964435577393, 1.3733723163604736, 1.2760000228881836, 1.3805725574493408, 1.3372770547866821, 1.9893945455551147, 1.8375413417816162, 1.153277039527893, ... ], ... ],
    	[ [ 0.6432732939720154, 1.9995450973510742, 1.8125872611999512, 1.6442939043045044, 1.3539447784423828, 1.24399995803833, 1.4199999570846558, 1.9882643222808838, ... ], [ 1.4055339097976685, 0.8631191253662109, 1.993871808052063, 0.19020920991897583, 1.463119626045227, 1.8730274438858032, 1.7599782943725586, 1.846172571182251, ... ], [ 0.7480000257492065, 1.2200000286102295, 1.9562129974365234, 1.6689687967300415, 1.4066275358200073, 1.8327780961990356, 1.355346918106079, 0.47455406188964844, ... ], [ 0.8062018156051636, 0.9961027503013611, 1.6214311122894287, 1.7404184341430664, 1.8306715488433838, 0.8920000195503235, 1.33332097530365, 1.1539301872253418, ... ], [ 0.9319999814033508, 1.6674718856811523, 1.8813728094100952, 1.7709211111068726, 1.979369044303894, 1.7624220848083496, 1.9638018608093262, 1.5246165990829468, ... ], [ 1.8086752891540527, 0.8018791675567627, 1.6172770261764526, 1.466715693473816, 1.177145004272461, 1.4199999570846558, 1.8317945003509521, 1.6681102514266968, ... ], [ 1.684000015258789, 0.9611668586730957, 0.9073543548583984, 1.864792823791504, 1.274979829788208, 1.676653504371643, 1.5479047298431396, 1.8299449682235718, ... ], [ 1.9415487051010132, 0.19200000166893005, 1.2879999876022339, 1.9853214025497437, 0.492000013589859, 1.6928220987319946, 1.6119999885559082, 1.4694534540176392, ... ], ... ],
    	...
    ]

Limited-Memory BFGS

Next, we apply the same optimization using L-BFGS, which is nearly ideal for purely second-order or quadratic functions.

TrainingTester.java:509 executed in 55.00 seconds (4.757 gc):

    IterativeTrainer iterativeTrainer = new IterativeTrainer(trainable.addRef());
    try {
      iterativeTrainer.setLineSearchFactory(label -> new ArmijoWolfeSearch());
      iterativeTrainer.setOrientation(new LBFGS());
      iterativeTrainer.setMonitor(TrainingTester.getMonitor(history));
      iterativeTrainer.setTimeout(30, TimeUnit.SECONDS);
      iterativeTrainer.setIterationsPerSample(100);
      iterativeTrainer.setMaxIterations(250);
      iterativeTrainer.setTerminateThreshold(0);
      return iterativeTrainer.run();
    } finally {
      iterativeTrainer.freeRef();
    }
Logging
Reset training subject: 1696184708009
Reset training subject: 1698251666985
Adding measurement 3e816552 to history. Total: 0
LBFGS Accumulation History: 1 points
Constructing line search parameters: GD
Non-optimal measurement 0.8532350811317408 < 0.8532350811317408. Total: 1
th(0)=0.8532350811317408;dx=-2.666359628701323E-7
Adding measurement 30810acb to history. Total: 1
New Minimum: 0.8532350811317408 > 0.8532345069472168
WOLFE (weak): th(2.154434690031884)=0.8532345069472168; dx=-2.66413607531703E-7 evalInputDelta=5.741845240381238E-7
Adding measurement e068547 to history. Total: 2
New Minimum: 0.8532345069472168 > 0.8532339329507661
WOLFE (weak): th(4.308869380063768)=0.8532339329507661; dx=-2.664128440372772E-7 evalInputDelta=1.1481809747415639E-6
Adding measurement 47937044 to history. Total: 3
New Minimum: 0.8532339329507661 > 0.8532316371014387
WOLFE (weak): th(12.926608140191302)=0.8532316371014387; dx=-2.6641237333092883E-7 evalInputDelta=3.444030302168777E-6
Adding measurement 41c9d3b7 to history. Total: 4
New Minimum: 0.8532316371014387 > 0.8532213056953497
WOLFE (weak): th(51.70643256076521)=0.8532213056953497; dx=-2.6641075477764104E-7 evalInputDelta=1.3775436391139095E-5
Adding measurement 203c2e37 to history. Total: 5
New Minimum: 0.8532213056953497 > 0.8531662059918895
WOLFE (weak): th(258.53216280382605)=0.8531662059918895; dx=-2.664021451475316E-7 evalInputDelta=6.887513985132099E-5
Adding measurement 372333df to history. Total: 6
New Minimum: 0.8531662059918895 > 0.8528218731458891
WOLFE (weak): th(1551.1929768229563)=0.8528218731458891; dx=-2.6634833557437944E-7 evalInputDelta=4.132079858517601E-4
Adding measurement 37997c2b to history. Total: 7
New Minimum: 0.8528218731458891 > 0.8503447964115418
WOLFE (weak): th(10858.350837760694)=0.8503447964115418; dx=-2.659075082728627E-7 evalInputDelta=0.0028902847201990323
Adding measurement 57beab50 to history. Total: 8
New Minimum: 0.8503447964115418 > 0.8303317003498624
WOLFE (weak): th(86866.80670208555)=0.8303317003498624; dx=-2.607354484157113E-7 evalInputDelta=0.02290338078187848
Adding measurement 1dad5997 to history. Total: 9
New Minimum: 0.8303317003498624 > 0.6647611271400282
END: th(781801.26031877)=0.6647611271400282; dx=-2.1673950303447227E-7 evalInputDelta=0.18847395399171263
Fitness changed from 0.8532350811317408 to 0.6647611271400282
Iteration 1 complete. Error: 0.6647611271400282 Total: 54.9947; Orientation: 2.3607; Line Search: 46.9595
Final threshold in iteration 1: 0.6647611271400282 (> 0.0) after 54.995s (< 30.000s)

Returns

    0.6647611271400282

This training run resulted in the following configuration:

TrainingTester.java:610 executed in 0.00 seconds (0.000 gc):

    RefList<double[]> state = network.state();
    assert state != null;
    String description = state.stream().map(RefArrays::toString).reduce((a, b) -> a + "\n" + b)
        .orElse("");
    state.freeRef();
    return description;

Returns

    

And regressed input:

TrainingTester.java:622 executed in 0.01 seconds (0.000 gc):

    return RefArrays.stream(RefUtil.addRef(data)).flatMap(x -> {
      return RefArrays.stream(x);
    }).limit(1).map(x -> {
      String temp_18_0015 = x.prettyPrint();
      x.freeRef();
      return temp_18_0015;
    }).reduce((a, b) -> a + "\n" + b).orElse("");

Returns

    [
    	[ [ -0.908, 0.032, -1.812, -0.272, 0.66, 0.224, 0.492, -0.564, ... ], [ -0.644, 0.7615431649881349, -1.42, -1.232, -0.592, -0.512, 1.7425004660621188, 1.1225104339233998, ... ], [ -1.968, -1.708, -0.392, -0.852, -1.872, -0.016, -0.384, -1.916, ... ], [ -0.528, -1.068, 0.7755213516850208, 0.432, 0.048, 1.4543421579747673, 0.668, 1.4540728924592168, ... ], [ 0.6455668558718008, -1.94, -1.712, 1.2766469287076372, 1.2160847423450754, 0.036, -1.644, -1.152, ... ], [ -0.588, -1.84, -0.572, 1.144, -1.584, 0.4960199534986645, -1.192, 1.0262175041883372, ... ], [ -1.788, 0.296, 1.204, -0.688, 1.740071037098793, 0.372, 1.6665714898288349, -1.596, ... ], [ -0.1903795407726389, 1.68, 1.7808662971444493, 1.268, 1.772, -1.044, -1.224, -1.544, ... ], ... ],
    	[ [ 1.3354540275290885, -0.408, -1.492, -1.356, 1.5755450270145972, 0.6833967112306677, 0.016, -1.604, ... ], [ -0.52, -0.228, 0.7017014819635377, 0.22745031014220224, -1.164, 0.104, -1.124, -1.0, ... ], [ 1.691538780984228, 0.9288152135443211, -1.756, -0.84, 0.5568525985642054, 0.424, -0.42, 0.036, ... ], [ 1.796, 0.144, -1.768, 1.4941128172326483, -0.524, -1.984, 1.7849909642629564, 0.444, ... ], [ -1.656, 0.7504767885105297, 0.08043755479625991, -1.9, 0.54, -0.976, 1.296, 0.64, ... ], [ -0.784, -0.9, -0.584, -1.972, -1.924, -1.352, 1.8976141976429022, 0.476, ... ], [ -0.548, -0.196, 1.0, -0.016, 1.224, -1.328, 1.364, -0.5464505689833805, ... ], [ -0.68, 1.7451829177631781, -1.052, 1.478749760302902, 0.492, 1.5501938177486967, 0.4, -1.428, ... ], ... ],
    	[ [ 0.9812975859122248, 0.9314403422809213, 0.744, 0.68, 0.848, -0.096, -1.308, 0.632, ... ], [ -0.672, -0.108, 1.6021028315952648, 1.6745104516995022, -0.02, -0.068, 1.3041246471203918, -0.184, ... ], [ 0.452, -0.964, -2.0, -1.472, -0.26, -0.256, 0.328, -0.404, ... ], [ 0.8668881148915891, -0.752, -0.48, 0.228, 0.776, 1.3104867741479131, 0.6395649916233258, 0.676, ... ], [ -0.9143022376453612, 1.7075450425686869, 1.1137899439175118, -0.876, 0.224, 1.367784352283093, -0.09367526243638855, -0.76, ... ], [ -1.48, -1.16, 0.712, -1.024, -0.08, 1.12, -0.688, 1.43902274646892, ... ], [ 0.0038217295259326822, 1.532, -1.648, -1.28, 0.46, -0.42, -0.12, 0.488, ... ], [ -0.46, 1.6525559425542056, -0.816, 1.9116933666851643, 0.272, -1.292, -0.648, 0.112, ... ], ... ],
    	[ [ -1.392, -1.612, -0.416, 1.396, -1.664, 0.7052938618593002, 0.068, 1.67688625342063, ... ], [ -1.764, -1.792, 0.56, -1.508, 0.9901128172326484, -2.0, -1.248, 0.308, ... ], [ -1.94, 1.128, 1.1820554764920868, 0.807224693453008, 1.6699108736510175, 0.768, 0.228, 1.0840436354942793, ... ], [ 0.936, 1.2336378840825426, -2.0, -1.184, 0.512, 0.56, 0.184, -0.412, ... ], [ -1.832, -1.576, 0.916, 0.3630327221766838, 1.5495905345343965, 0.788, -1.448, 1.228, ... ], [ -1.496, -0.032, -0.704, -1.208, -0.596, 0.616, -0.684, 0.736, ... ], [ -1.68, -0.276, -1.244, 0.568, 0.7687578816918106, 0.5004312887677752, 1.6751274457151173, 0.32, ... ], [ 0.152, 0.16, 1.6783184793121717, -0.68, -1.416, -1.624, -1.916, 1.0139108736510176, ... ], ... ],
    	[ [ -0.468, 1.4656578420252326, -1.852, -0.936, 1.3962056765226065, -1.176, 0.476, 0.6405085830070016, ... ], [ 1.672, -1.084, -1.26, -0.252, 1.184, -0.62, -0.528, 0.236, ... ], [ -1.924, -0.696, -1.648, 0.052, -0.5431274368270659, -1.02, 1.112, 0.896, ... ], [ 0.39348583313562435, -0.188, 1.6829691287397144, 0.812, -1.472, 1.088, 1.6182947873174993, 1.7490009321242377, ... ], [ 1.0, 1.413182917763178, -0.352, -0.15349328124147354, 1.424340295948305, 1.432, 0.64, 1.396, ... ], [ 0.236, -1.688, -0.6, -1.724, -1.364, 0.9, -0.48, 1.6299308315937078, ... ], [ -0.5, 1.322271111987923, -1.536, -0.472, -1.684, -1.656, -0.12, -1.544, ... ], [ 1.5380355229934222, -1.224, -0.848, 1.096, -0.388, 0.6768488811773195, -0.852, 1.6976952159350531, ... ], ... ],
    	[ [ -1.932, -0.228, 0.104, -1.632, -1.096, -0.316, -1.136, -0.8, ... ], [ 1.7731255925767062, -0.14, 0.31771145871286993, 0.2149616984099678, -1.512, 0.4003839247765457, 0.9804312954338137, -1.336, ... ], [ -0.432, 1.8820455075197993, 0.832, -1.192, -1.084, 0.968, -0.392, -1.324, ... ], [ -0.412, 1.444, -1.848, 1.7288189309312072, -1.296, 1.8988644373399999, 0.92, -0.872, ... ], [ 1.5312147167036758, 1.272, 0.9777151827657945, -0.5, -0.516, -0.372, 1.3497824913676373, -1.804, ... ], [ 0.176, 0.0, -1.164, -1.82, 1.196, 1.9336478591653652, 0.512, -1.124, ... ], [ 1.208, 0.276, 1.5061501955864944, 0.132, 0.464, -0.828, 1.1550663764776052, -1.528, ... ], [ -1.632, 0.772, -0.404, 1.2572739083606357, 1.2786251126270072, 0.372, -1.776, 0.356, ... ], ... ],
    	[ [ 1.488, -0.1, -1.676, 0.736, -1.524, 1.1049809697375217, 0.58, 1.8616042253375955, ... ], [ -1.128, 0.838314751926228, -1.1025652682406066, 0.7228307874831044, 0.6558142903081348, -1.78, -0.98, -1.628, ... ], [ 0.42015830808735155, -1.408, -0.012, 0.28, 1.4671810557367158, -0.2093586462616855, 0.9863458909157431, -0.52, ... ], [ -0.424, 0.26, 0.7322356023265778, -0.624, -0.62, -0.64, -0.02, 1.348, ... ], [ 0.18, 1.36, -0.168, 0.8709753947681991, 1.8912383942552646, -1.42, 0.6228843952826902, 0.488, ... ], [ -0.852, 1.317263936055329, 0.6193829926523082, -1.344, -1.076, 0.7233967023426164, -0.668, 1.197273917248687, ... ], [ 0.9897251550711011, 0.316, 1.7412975770241736, 1.465799911778793, 0.812, -0.336, 0.088, 0.248, ... ], [ 0.384, -0.128, 1.804, 1.172, 0.612, 0.032, -1.172, -1.564, ... ], ... ],
    	[ [ 1.376, -0.392, -1.824, -1.524, -0.592, -1.056, 1.36, -1.328, ... ], [ 1.4819345378705298, -0.004, -1.876, 0.288, -0.352, -1.172, 1.8566942971428924, 0.908, ... ], [ -0.416, 0.7075213561290465, -1.584, 1.3615905345343966, 1.216, -1.712, -1.924, 1.340698021195817, ... ], [ -1.544, -1.4, -0.46, -1.72, 0.972, -1.236, 0.772, 0.344, ... ], [ -1.228, -1.488, -0.9, -0.432, 0.732, -1.584, -1.448, 0.104, ... ], [ 1.4579582287541957, -1.784, 0.304, -0.096, 0.944, -1.788, -1.956, -0.156, ... ], [ -1.696, 1.1934185156457304, -0.368, 0.296, -1.94, -1.164, 0.987396693454565, 0.22, ... ], [ -1.564, 1.076, -0.328, 0.904, 1.2246469420397141, 0.5894858331356244, 0.788, 0.7221227850939294, ... ], ... ],
    	...
    ]

To produce the following output:

TrainingTester.java:633 executed in 0.17 seconds (0.000 gc):

    Result[] array = ConstantResult.batchResultArray(pop(RefUtil.addRef(data)));
    @Nullable
    Result eval = layer.eval(array);
    assert eval != null;
    TensorList tensorList = Result.getData(eval);
    String temp_18_0016 = tensorList.stream().limit(1).map(x -> {
      String temp_18_0017 = x.prettyPrint();
      x.freeRef();
      return temp_18_0017;
    }).reduce((a, b) -> a + "\n" + b).orElse("");
    tensorList.freeRef();
    return temp_18_0016;

Returns

    [
    	[ [ 1.335453987121582, 0.7615431547164917, 0.7017014622688293, 0.22745031118392944, 1.575545072555542, 0.6833966970443726, 1.7425004243850708, 1.1225104331970215, ... ], [ 1.7960000038146973, 0.9288151860237122, 0.7755213379859924, 1.4941128492355347, 0.5568525791168213, 1.4543421268463135, 1.784990906715393, 1.4540728330612183, ... ], [ 0.6455668807029724, 0.7504767775535583, 0.0804375559091568, 1.2766469717025757, 1.2160847187042236, 0.49601995944976807, 1.8976142406463623, 1.0262174606323242, ... ], [ -0.1903795450925827, 1.7451828718185425, 1.780866265296936, 1.4787497520446777, 1.7719999551773071, 1.5501937866210938, 1.6665714979171753, -0.5464505553245544, ... ], [ 0.946338415145874, 1.8387634754180908, 1.6242057085037231, 1.7615232467651367, 0.9401046633720398, 1.921738862991333, 0.22879526019096375, 1.1199862957000732, ... ], [ 1.431511402130127, 0.3957507014274597, 1.6752620935440063, 1.576192021369934, 1.725307583808899, 1.2395586967468262, 0.9481956958770752, 1.8550463914871216, ... ], [ 0.7407578825950623, 1.233634114265442, 1.5547834634780884, 0.9884175658226013, 0.6746924519538879, 1.6815568208694458, 1.7290109395980835, 1.29689621925354, ... ], [ 1.1053985357284546, 1.2371019124984741, 0.6600772738456726, 1.839875340461731, 0.4348370432853699, 1.350797176361084, 0.8392246961593628, 1.8040000200271606, ... ], ... ],
    	[ [ 0.9812976121902466, 0.9314403533935547, 1.6021028757095337, 1.6745104789733887, 0.990112841129303, 0.7052938342094421, 1.3041245937347412, 1.6768862009048462, ... ], [ 0.9359999895095825, 1.2336379289627075, 1.1820554733276367, 0.807224690914154, 1.6699109077453613, 1.3104867935180664, 0.6395649909973145, 1.0840436220169067, ... ], [ -0.9143022298812866, 1.7075450420379639, 1.1137899160385132, 0.363032728433609, 1.5495905876159668, 1.3677843809127808, -0.09367526322603226, 1.4390227794647217, ... ], [ 0.15199999511241913, 1.6525559425354004, 1.6783185005187988, 1.9116933345794678, 0.7687578797340393, 0.5004312992095947, 1.675127387046814, 1.0139108896255493, ... ], [ 1.0741127729415894, 1.2885795831680298, 0.9977862238883972, 1.4077270030975342, 1.521240234375, 1.4313766956329346, 1.7408288717269897, 1.609479546546936, ... ], [ 1.8016815185546875, 0.7456678152084351, 1.1308507919311523, 1.4974995851516724, 1.4973849058151245, 0.8691256046295166, 1.5554004907608032, 0.5136778354644775, ... ], [ 1.147066354751587, 1.336899995803833, 1.8237980604171753, 1.5979881286621094, 1.6991610527038574, 1.5086506605148315, 1.1073766946792603, 1.514365792274475, ... ], [ 0.938763439655304, 1.9250346422195435, 1.7653886079788208, 1.048195719718933, 0.4167379140853882, 1.5388743877410889, 1.6900018453598022, 1.0360000133514404, ... ], ... ],
    	[ [ 1.7731256484985352, 1.4656578302383423, 0.3177114725112915, 0.2149616926908493, 1.3962056636810303, 0.40038391947746277, 0.980431318283081, 0.6405085921287537, ... ], [ 0.3934858441352844, 1.8820455074310303, 1.682969093322754, 1.7288188934326172, -0.5431274175643921, 1.8988643884658813, 1.6182948350906372, 1.749000906944275, ... ], [ 1.531214714050293, 1.4131829738616943, 0.9777151942253113, -0.15349328517913818, 1.4243402481079102, 1.933647871017456, 1.3497824668884277, 1.629930853843689, ... ], [ 1.53803551197052, 1.3222711086273193, 1.506150245666504, 1.2572739124298096, 1.2786251306533813, 0.6768488883972168, 1.1550663709640503, 1.697695255279541, ... ], [ 1.3260055780410767, 1.1987260580062866, 0.1354166567325592, 1.843999981880188, 1.0911710262298584, 1.4880000352859497, 0.5913493633270264, 1.6888662576675415, ... ], [ 1.2230089902877808, 1.454941749572754, 0.5488326549530029, 1.7865678071975708, 1.55921471118927, 0.0718616470694542, 1.7293412685394287, 1.804977297782898, ... ], [ 1.5152621269226074, 1.076516032218933, 1.8605122566223145, 0.8517506718635559, 0.7213038206100464, 1.7348918914794922, 1.45840322971344, 0.5630564093589783, ... ], [ 0.4445185363292694, 1.9638416767120361, 0.9630700945854187, 1.5385241508483887, 0.0830427035689354, -0.2625814974308014, 1.5366606712341309, 1.7096515893936157, ... ], ... ],
    	[ [ 1.4880000352859497, 0.8383147716522217, -1.1025652885437012, 0.7360000014305115, 0.6558142900466919, 1.1049809455871582, 1.8566943407058716, 1.8616042137145996, ... ], [ 0.4201582968235016, 0.707521378993988, 0.7322356104850769, 1.3615905046463013, 1.4671810865402222, -0.20935864746570587, 0.9863458871841431, 1.3480000495910645, ... ], [ 1.4579582214355469, 1.3600000143051147, 0.6193829774856567, 0.8709753751754761, 1.8912384510040283, 0.7233967185020447, 0.6228843927383423, 1.1972739696502686, ... ], [ 0.9897251725196838, 1.1934185028076172, 1.8040000200271606, 1.4657999277114868, 1.2246469259262085, 0.5894858241081238, 0.9873967170715332, 0.7221227884292603, ... ], [ 1.2977488040924072, -0.04906200245022774, -0.45348331332206726, 0.6464630961418152, 1.1261228322982788, 1.3998416662216187, 0.04257403686642647, 0.7850645184516907, ... ], [ 1.9597843885421753, 0.9239463806152344, 0.8886332511901855, 1.7901601791381836, 0.33341851830482483, 0.874190092086792, 1.6641583442687988, 1.8701838254928589, ... ], [ 1.5248899459838867, 1.4477033615112305, 0.7218298316001892, 1.019008994102478, -0.2184942066669464, 1.868138313293457, 1.5397744178771973, 0.7442492842674255, ... ], [ 1.0915449857711792, 0.3363739550113678, 1.33124840259552, 1.317465901374817, 0.7653412222862244, 1.6354241371154785, 1.8926706314086914, 1.34019935131073, ... ], ... ],
    	[ [ 1.1413923501968384, 1.4302711486816406, 0.3383047878742218, 1.7382985353469849, 1.085546851158142, 0.9262611269950867, 0.9831075072288513, 0.7495431900024414, ... ], [ 0.4854559004306793, 0.9983384609222412, 1.479343056678772, 1.0232247114181519, 0.6780891418457031, 0.25030848383903503, 1.7178298234939575, 1.7285122871398926, ... ], [ 1.6099382638931274, 0.20817826688289642, 1.9402393102645874, 0.35680896043777466, 0.9548407793045044, 1.9213749170303345, 1.7391037940979004, 1.815295696258545, ... ], [ 1.1991909742355347, 1.743194818496704, 1.9403640031814575, 1.760828971862793, 1.6889199018478394, 0.8808388710021973, 1.1247715950012207, 1.3234777450561523, ... ], [ 0.6862137913703918, 1.7436060905456543, 1.2119863033294678, 1.2214758396148682, 0.21107636392116547, 1.0404748916625977, 1.5936516523361206, 1.2649298906326294, ... ], [ 0.7267734408378601, 1.8128762245178223, 1.4162729978561401, 1.5805222988128662, 1.210625171661377, 1.1145341396331787, 0.9784867763519287, 1.7117606401443481, ... ], [ 1.4508980512619019, 0.23890434205532074, 1.483669638633728, 0.8257388472557068, 0.9216578602790833, 0.502132773399353, 1.7230265140533447, 1.2892838716506958, ... ], [ 1.0776715278625488, 0.59593266248703, 1.8487852811813354, 0.39429107308387756, 1.4873768091201782, 0.8916796445846558, 1.9262511730194092, 1.575487732887268, ... ], ... ],
    	[ [ 0.45385977625846863, -0.04985726252198219, 1.216478705406189, 1.5415905714035034, 1.5886132717132568, 1.7093175649642944, 1.045499563217163, 0.6217325925827026, ... ], [ 1.6492003202438354, 0.7366743683815002, -0.12991459667682648, 0.4091555178165436, 1.6512047052383423, 1.5121819972991943, 0.6742948293685913, 1.8340791463851929, ... ], [ 1.7521246671676636, 0.40441757440567017, 1.4028507471084595, -0.1797780990600586, 1.989556908607483, 1.7568188905715942, 1.9371591806411743, 1.0189417600631714, ... ], [ 0.9782947897911072, 0.7315787076950073, 0.7940354943275452, 0.6197369694709778, 1.5908170938491821, 1.1886032819747925, 1.347161054611206, 0.6204749345779419, ... ], [ 0.7178398370742798, 1.7239999771118164, 1.012613296508789, 1.6399999856948853, 1.6553667783737183, 0.30399999022483826, 1.764775276184082, 1.6496516466140747, ... ], [ 0.41245123744010925, 1.7020118236541748, 0.9749990701675415, 1.5364686250686646, 1.7680000066757202, 1.4819681644439697, 1.3180454969406128, 1.7574084997177124, ... ], [ 1.921920895576477, 1.8072857856750488, 1.653489589691162, 1.6753767728805542, 1.392330288887024, 0.5074577331542969, 1.8092066049575806, 1.6748408079147339, ... ], [ 0.952957272529602, 1.2012938261032104, 1.5928562879562378, 1.8438180685043335, 1.6562730073928833, 1.5232146978378296, 1.7224094867706299, 0.4437980651855469, ... ], ... ],
    	[ [ 0.031824275851249695, -0.12227484583854675, 1.909364938735962, 0.4892365336418152, 1.4899208545684814, 1.8167753219604492, 1.8226251602172852, 1.178544044494629, ... ], [ 0.9990090727806091, 1.686898112297058, 1.5399999618530273, 0.649030864238739, 1.6043540239334106, 1.7159663438796997, 1.6404449939727783, 1.2020118236541748, ... ], [ 0.6161483526229858, 1.8824331760406494, 0.4897824823856354, 0.5406805872917175, 1.0341365337371826, 0.8615132570266724, 0.08696170151233673, 1.6233431100845337, ... ], [ 0.5240872502326965, 1.5995450019836426, 1.6222611665725708, 1.7290109395980835, 1.4857151508331299, 0.26606544852256775, 1.47348952293396, 1.7907733917236328, ... ], [ 1.573752522468567, 0.4095294773578644, 1.0971791744232178, 1.590466856956482, 1.0711573362350464, 1.980488657951355, 1.5720000267028809, 0.615272045135498, ... ], [ 1.7662885189056396, 1.5422375202178955, 1.1785677671432495, 1.8094558715820312, 1.8115350008010864, 0.5934184789657593, 1.597068190574646, 1.2812365293502808, ... ], [ 0.4396933615207672, 0.25780242681503296, 0.05730385333299637, 1.5731356143951416, 1.9330345392227173, 1.5120474100112915, 1.5819544792175293, 1.8911710977554321, ... ], [ -0.04853598400950432, 1.8329672813415527, 1.3849809169769287, 1.0634876489639282, 1.5261027812957764, 1.6153056621551514, 1.730658769607544, 1.3421028852462769, ... ], ... ],
    	[ [ 0.9710464477539062, 1.75371515750885, 0.9397132992744446, 0.23700718581676483, 1.4964549541473389, 1.7463995218276978, 1.5223321914672852, 1.6925559043884277, ... ], [ 1.5765459537506104, 1.20158052444458, 1.0319290161132812, 0.037010908126831055, -0.14011839032173157, 1.8017724752426147, 0.6519626379013062, 1.1407479047775269, ... ], [ 1.6381438970565796, 1.2200000286102295, 1.664067268371582, 1.8791711330413818, 0.9470327496528625, 1.5014421939849854, 0.8458734750747681, 0.8522056937217712, ... ], [ 0.9308980703353882, 0.9889772534370422, 1.7995686531066895, 1.4340218305587769, 1.6454085111618042, 1.4148781299591064, 1.7964786291122437, 1.852229356765747, ... ], [ 1.4428207874298096, 0.31718918681144714, 0.39926832914352417, 1.8457388877868652, 0.6362119317054749, 1.3170782327651978, 1.7001583576202393, 1.4818634986877441, ... ], [ 1.4844648838043213, -0.4379582405090332, 1.8061028718948364, 1.5557843446731567, 0.5429753661155701, 1.5516533851623535, 1.8460454940795898, 1.3830900192260742, ... ], [ 1.684000015258789, 1.4350128173828125, 0.11998630315065384, 0.9776678085327148, 1.0683403015136719, 0.24442756175994873, 1.2771356105804443, 1.9724550247192383, ... ], [ 1.5567716360092163, 1.1035724878311157, 1.6568763256072998, 0.506779670715332, 1.6124587059020996, 1.6358180046081543, 1.7233468294143677, 0.8139071464538574, ... ], ... ],
    	...
    ]

TrainingTester.java:432 executed in 0.14 seconds (0.000 gc):

    return TestUtil.compare(title + " vs Iteration", runs);
Logging
Plotting range=[0.0, -0.8465516069125543], [2.0, -0.17733438447391436]; valueStats=DoubleSummaryStatistics{count=3, sum=1.471902, min=0.142380, average=0.490634, max=0.664761}
Only 1 points for GD
Only 1 points for CjGD
Only 1 points for LBFGS

Returns

Result

TrainingTester.java:435 executed in 0.02 seconds (0.000 gc):

    return TestUtil.compareTime(title + " vs Time", runs);
Logging
Plotting range=[-1.0, -0.8465516069125543], [1.0, -0.17733438447391436]; valueStats=DoubleSummaryStatistics{count=3, sum=1.471902, min=0.142380, average=0.490634, max=0.664761}
Only 1 points for GD
Only 1 points for CjGD
Only 1 points for LBFGS

Returns

Result

Results

TrainingTester.java:255 executed in 0.00 seconds (0.000 gc):

    return grid(inputLearning, modelLearning, completeLearning);

Returns

Result

TrainingTester.java:258 executed in 0.00 seconds (0.000 gc):

    return new ComponentResult(null == inputLearning ? null : inputLearning.value,
        null == modelLearning ? null : modelLearning.value, null == completeLearning ? null : completeLearning.value);

Returns

    {"input":{ "LBFGS": { "type": "NonConverged", "value": 0.6647611271400282 }, "CjGD": { "type": "NonConverged", "value": 0.14237980476574585 }, "GD": { "type": "NonConverged", "value": 0.6647611271400282 } }, "model":null, "complete":null}

LayerTests.java:425 executed in 0.00 seconds (0.000 gc):

    throwException(exceptions.addRef());

Results

details | result
{"input":{ "LBFGS": { "type": "NonConverged", "value": 0.6647611271400282 }, "CjGD": { "type": "NonConverged", "value": 0.14237980476574585 }, "GD": { "type": "NonConverged", "value": 0.6647611271400282 } }, "model":null, "complete":null}OK
  {
    "result": "OK",
    "performance": {
      "execution_time": "213.454",
      "gc_time": "16.365"
    },
    "created_on": 1586736170628,
    "file_name": "trainingTest",
    "report": {
      "simpleName": "Float",
      "canonicalName": "com.simiacryptus.mindseye.layers.cudnn.PoolingLayerTest.Float",
      "link": "https://github.com/SimiaCryptus/mindseye-cudnn/tree/59d5b3318556370acb2d83ee6ec123ce0fc6974f/src/test/java/com/simiacryptus/mindseye/layers/cudnn/PoolingLayerTest.java",
      "javaDoc": ""
    },
    "training_analysis": {
      "input": {
        "LBFGS": {
          "type": "NonConverged",
          "value": 0.6647611271400282
        },
        "CjGD": {
          "type": "NonConverged",
          "value": 0.14237980476574585
        },
        "GD": {
          "type": "NonConverged",
          "value": 0.6647611271400282
        }
      }
    },
    "archive": "s3://code.simiacrypt.us/tests/com/simiacryptus/mindseye/layers/cudnn/PoolingLayer/Float/trainingTest/202004130250",
    "id": "0c49fd5c-d2cc-41a9-b212-a94b7961bcdf",
    "report_type": "Components",
    "display_name": "Comparative Training",
    "target": {
      "simpleName": "PoolingLayer",
      "canonicalName": "com.simiacryptus.mindseye.layers.cudnn.PoolingLayer",
      "link": "https://github.com/SimiaCryptus/mindseye-cudnn/tree/59d5b3318556370acb2d83ee6ec123ce0fc6974f/src/main/java/com/simiacryptus/mindseye/layers/cudnn/PoolingLayer.java",
      "javaDoc": ""
    }
  }