1. Test Modules
  2. Training Characteristics
    1. Input Learning
      1. Gradient Descent
      2. Conjugate Gradient Descent
      3. Limited-Memory BFGS
    2. Results
  3. Results

Subreport: Logs for com.simiacryptus.ref.lang.ReferenceCountingBase

Test Modules

Using Seed 3438222257944906752

Training Characteristics

Input Learning

In this test, we use a network to learn a target input, given its pre-evaluated output:
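Conceptually, input learning freezes the network's weights and treats the input tensor itself as the trainable parameter: gradient steps are applied to the input until the network reproduces the pre-recorded target output. A minimal sketch of that loop in plain Java (all helpers here — `forward`, `inputGradient`, `meanSquaredError`, `randomStartingPoint`, `preEvaluatedOutput` — are hypothetical stand-ins, not the TrainingTester API):

    // Input learning sketch: optimize the input, not the weights.
    // forward(x) evaluates the frozen network; inputGradient(x, target)
    // returns dLoss/dInput for a mean-squared-error loss (both hypothetical).
    double[] input = randomStartingPoint();
    final double[] target = preEvaluatedOutput();
    double rate = 0.1;
    for (int iter = 0; iter < 250; iter++) {
      double[] output = forward(input);
      double loss = meanSquaredError(output, target);
      if (loss <= 0) break;                        // terminate threshold, as configured below
      double[] grad = inputGradient(input, target);
      for (int i = 0; i < input.length; i++) {
        input[i] -= rate * grad[i];                // descend on the input values
      }
    }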

TrainingTester.java:332 executed in 0.06 seconds (0.000 gc):

    return RefArrays.stream(RefUtil.addRef(input_target)).flatMap(RefArrays::stream).map(x -> {
      try {
        return x.prettyPrint();
      } finally {
        x.freeRef();
      }
    }).reduce((a, b) -> a + "\n" + b).orElse("");

Returns

    [
    	[ [ 1.828, 0.68, 0.276, -0.576, -1.708, -0.46, 0.06, 0.956, ... ], [ 1.812, -1.092, 1.624, 0.856, -1.996, -0.5, 0.92, -0.748, ... ], [ -0.72, 1.896, 1.956, -1.428, 0.036, -1.48, 1.232, 0.744, ... ], [ 1.968, 1.492, -0.488, 0.008, 1.768, 1.78, -1.336, -0.308, ... ], [ -0.148, -1.68, 0.276, -1.8, -1.484, -1.796, 1.964, -0.824, ... ], [ 1.628, -1.368, -0.888, -1.632, -1.636, 1.4, 1.84, 0.544, ... ], [ 1.82, -0.128, -1.14, 0.136, 1.04, 1.288, -1.812, -1.904, ... ], [ 0.068, -1.54, 0.288, -0.42, 0.88, -1.248, -0.58, -0.236, ... ], ... ],
    	[ [ -0.844, -0.164, 1.328, -1.364, -0.464, -1.844, 1.064, 1.576, ... ], [ -1.12, -0.448, 0.66, -1.364, -0.644, 0.692, 1.06, -0.452, ... ], [ -0.82, -0.832, 0.944, -1.96, -1.132, -0.116, -1.384, -0.436, ... ], [ -1.556, 0.692, 1.532, -1.156, 0.32, -0.54, 1.66, -1.944, ... ], [ 0.308, 1.112, 1.264, 1.544, -0.236, 1.476, 0.952, 1.412, ... ], [ -0.98, -0.268, -1.724, -0.644, 0.572, 0.88, 1.344, -1.136, ... ], [ -0.356, -0.996, -0.972, -1.7, -1.892, 1.316, 0.724, -1.328, ... ], [ -0.416, -0.556, -1.192, 1.452, 0.712, 0.268, -0.668, -0.788, ... ], ... ],
    	[ [ 1.888, -0.164, -0.088, -0.764, 1.476, -0.536, -0.164, 0.172, ... ], [ 0.768, 1.824, -1.1, 1.728, 1.98, 0.016, -0.592, 1.472, ... ], [ -1.776, 0.416, 1.868, 0.424, -1.04, 0.808, -1.856, 1.344, ... ], [ 0.844, -0.296, 1.016, -0.664, -1.392, -1.38, -1.552, 1.16, ... ], [ 1.092, 1.0, -0.208, 1.692, 0.344, 0.424, -0.516, -1.56, ... ], [ -0.076, -0.384, 0.468, 1.756, -1.416, -1.86, -0.564, -1.004, ... ], [ -0.2, 1.528, -1.46, -1.472, -0.376, 0.644, 0.18, 0.124, ... ], [ 1.368, 0.952, -1.032, 1.98, 0.564, -1.18, -0.376, -1.992, ... ], ... ],
    	[ [ -0.34, 1.304, -1.924, -0.188, -1.18, 1.244, 1.412, 1.128, ... ], [ -1.312, -0.872, -1.412, 0.5, 1.108, -0.184, -0.964, -0.54, ... ], [ -0.884, -1.636, 0.232, -0.652, -1.824, -0.76, 0.232, -0.104, ... ], [ -1.096, -0.832, -0.688, -0.152, 1.476, 0.14, -0.772, -0.6, ... ], [ -1.896, -0.504, -0.32, -1.22, -1.152, -1.484, -0.564, 1.688, ... ], [ 0.832, -0.284, -0.324, -1.824, -0.608, -0.98, 0.08, 1.528, ... ], [ 1.708, -0.808, -0.044, -1.624, -1.396, -0.7, 0.184, 1.712, ... ], [ -0.9, -0.124, -1.2, 0.532, 0.752, 0.908, -0.916, -1.524, ... ], ... ],
    	[ [ 1.488, 1.404, -1.82, 1.352, -1.628, -0.7, -1.704, 1.3, ... ], [ 0.152, 1.792, 0.056, 0.716, -0.2, 0.544, -1.044, 0.272, ... ], [ 0.208, 0.416, -1.344, -1.496, 0.668, 1.344, 1.292, 0.492, ... ], [ 1.148, -0.084, -1.668, -1.992, 0.028, 1.528, -0.512, -1.512, ... ], [ 0.308, -0.264, 0.844, 1.248, -0.36, -0.972, 0.948, 1.552, ... ], [ -0.308, 0.06, -1.824, -1.476, 0.536, -1.624, -1.288, -1.296, ... ], [ 0.876, -0.716, 0.972, 1.868, -0.656, -1.132, -0.828, 0.244, ... ], [ 1.148, -1.732, -0.612, 0.904, -1.844, 0.304, -1.864, 0.672, ... ], ... ],
    	[ [ 0.396, 1.188, -1.504, -0.968, -1.196, -1.668, -1.868, 1.508, ... ], [ -1.736, 0.452, 0.776, -0.844, 0.752, -0.22, -0.48, 1.984, ... ], [ 0.896, 1.824, 0.008, 0.7, -0.752, -0.384, 0.216, -1.348, ... ], [ 0.692, -1.292, -0.064, -0.948, -1.016, -1.068, 1.228, -1.6, ... ], [ -1.792, -1.404, 1.016, -1.348, 1.08, 0.42, 0.956, -0.416, ... ], [ 0.276, 1.636, -0.888, -0.076, 0.044, -0.06, 1.052, -0.616, ... ], [ 1.196, -1.176, -1.204, 1.168, 0.58, -1.4, -1.176, -1.904, ... ], [ 1.836, 1.192, 1.308, -1.9, 0.556, 0.636, -1.124, 1.92, ... ], ... ],
    	[ [ -0.548, 1.796, 1.152, -0.248, 0.24, -1.84, 1.032, 0.16, ... ], [ -0.56, -0.052, -1.556, -0.804, 0.348, 0.228, -0.212, -0.012, ... ], [ 0.724, 0.508, -1.556, -1.744, 1.052, -1.096, -1.552, -0.484, ... ], [ 0.732, 0.212, 1.184, -0.856, 1.26, 1.32, -1.096, -0.668, ... ], [ 0.0, -0.376, -0.892, 1.508, -0.996, -1.452, 1.036, -1.168, ... ], [ -1.648, 1.86, 0.12, -0.74, 1.996, 1.632, -1.408, 0.144, ... ], [ -0.168, -0.628, 1.472, 1.168, -0.272, 1.628, -1.996, -0.896, ... ], [ -1.596, -1.36, 0.452, 0.424, 1.26, -1.064, 1.112, -1.292, ... ], ... ],
    	[ [ -1.512, -1.9, -1.268, -0.912, 0.496, 1.812, 1.116, 1.648, ... ], [ 1.996, 0.96, -0.612, -0.236, -1.296, 0.616, -0.1, -1.74, ... ], [ 1.076, -0.184, 1.628, -1.436, -1.328, -1.364, -0.632, -0.552, ... ], [ 1.396, -1.744, 1.98, 1.868, 1.832, -0.24, -0.272, -1.224, ... ], [ 1.312, 1.38, 1.456, -1.58, 0.736, -1.228, 1.064, 1.912, ... ], [ 1.856, 0.724, -0.68, 1.3, -1.876, 1.696, 0.184, 0.972, ... ], [ -1.276, 1.584, -1.744, -0.128, 1.896, -0.776, -1.248, -0.444, ... ], [ 1.712, 0.096, -0.2, -0.9, -0.636, 1.94, -1.116, -1.544, ... ], ... ],
    	...
    ]
    [
    	[ [ 0.016, 0.276, -0.608, -1.736, -1.42, 1.356, -0.084, 1.416, ... ], [ 0.908, 1.008, 0.784, 0.736, 1.432, 0.528, 1.708, -0.104, ... ], [ -1.26, -1.012, -1.428, -0.724, -1.104, 1.408, 1.66, -1.94, ... ], [ -1.596, -1.596, 1.248, 0.36, -1.516, -1.128, 0.908, -1.204, ... ], [ -0.276, 0.612, -0.44, -0.788, -0.484, 0.36, -0.536, -0.236, ... ], [ 1.884, 0.848, 0.584, -1.264, -1.572, 0.1, -0.676, -1.624, ... ], [ -0.256, -1.1, -1.396, 1.384, -0.56, 1.556, 1.752, -1.484, ... ], [ 1.828, -0.468, -1.296, -0.852, 0.504, -0.276, 0.392, 0.296, ... ], ... ],
    	[ [ 0.364, -0.256, 1.48, 0.472, -0.68, 1.052, 0.088, 0.768, ... ], [ -1.364, -0.312, -0.032, 1.796, 0.408, -1.816, 1.744, 1.932, ... ], [ -1.408, 0.308, -0.596, -0.952, 0.552, -1.596, -0.008, -1.44, ... ], [ 1.22, 1.508, -1.98, 1.28, -1.388, 1.908, 0.08, 1.172, ... ], [ 1.064, -0.188, -1.652, 1.12, 1.56, -0.04, 2.0, -1.356, ... ], [ 0.132, 0.044, -1.928, 0.032, -0.908, -1.912, -0.652, 0.364, ... ], [ 1.924, 0.668, -0.16, 0.668, 0.116, -1.44, 0.576, -1.704, ... ], [ 1.792, 0.048, -1.22, 1.024, -1.092, -1.332, -0.408, 0.188, ... ], ... ],
    	[ [ -1.904, -1.908, -0.704, 1.76, 0.04, 0.764, 1.0, 1.536, ... ], [ 1.156, -0.268, -1.924, -1.412, -0.276, -1.596, 0.792, -1.508, ... ], [ 1.76, -1.464, 1.76, -1.54, 0.552, -1.816, 1.324, 1.376, ... ], [ 1.644, -1.212, -0.872, -0.156, -0.584, -0.988, 0.024, 0.816, ... ], [ 1.7, -1.292, -0.028, 1.532, -1.672, 1.596, -0.196, 1.336, ... ], [ 0.736, -1.38, -1.056, -0.732, -1.4, 1.044, -1.748, 0.04, ... ], [ -0.384, 0.524, -0.504, -0.004, 0.564, 1.596, 0.412, 1.876, ... ], [ -1.2, 0.828, -0.884, 1.644, -0.152, -1.604, 0.14, 0.78, ... ], ... ],
    	[ [ -0.384, -1.248, -1.516, 1.292, -0.368, 0.624, -0.54, -0.944, ... ], [ -0.524, -0.3, -0.148, 0.052, 1.832, 1.22, -1.356, -0.256, ... ], [ 1.94, 1.888, 0.34, 1.908, -0.42, 0.636, -1.44, 0.832, ... ], [ 0.384, -1.092, -0.948, 1.524, 1.48, -0.9, 0.932, 1.392, ... ], [ -0.252, 1.056, 1.46, 1.008, -0.52, 1.356, 1.092, 1.976, ... ], [ -0.532, 0.788, -0.58, -1.472, 0.148, 1.984, -0.832, -0.624, ... ], [ 1.376, 1.828, 1.212, 0.508, 1.668, -1.272, 0.528, -1.156, ... ], [ -0.752, -0.896, 1.46, 0.064, 1.648, -0.04, 0.728, 0.38, ... ], ... ],
    	[ [ -0.36, -0.34, 1.132, 1.136, 1.06, 0.216, -1.812, -1.2, ... ], [ 0.932, 0.456, 0.676, -0.068, 1.62, -0.256, 0.776, 0.912, ... ], [ 0.772, -1.068, -1.936, -0.316, -1.376, -0.836, 0.676, 0.244, ... ], [ 1.336, 0.628, -1.036, 0.684, 1.92, 0.608, 0.74, 1.012, ... ], [ 0.52, 0.628, -0.128, 1.796, 1.98, 1.308, 0.516, 1.936, ... ], [ 1.544, 0.872, -1.724, 0.332, -1.624, -0.652, -1.688, 1.208, ... ], [ -1.852, -1.508, 1.564, -0.196, 1.5, -1.1, 0.972, -1.164, ... ], [ 1.148, 0.936, 1.136, -0.696, -0.28, 1.68, -0.656, -1.172, ... ], ... ],
    	[ [ -0.212, 1.476, 1.1, 0.452, 1.248, 0.98, -1.148, -1.952, ... ], [ -1.236, -1.696, -1.4, 0.068, -1.048, -1.472, 0.696, -0.388, ... ], [ 1.22, -0.42, 1.4, 0.512, 1.664, 0.996, 1.796, -0.732, ... ], [ 0.424, -1.116, 0.204, -0.976, -0.652, 1.472, 0.108, 0.328, ... ], [ 0.704, 0.4, 0.428, -0.44, 0.332, 1.608, -0.552, -1.048, ... ], [ 1.88, -1.716, -1.068, -0.904, -1.884, -1.468, -0.216, -1.164, ... ], [ -1.396, 1.852, 1.384, 0.908, 0.588, -1.884, 1.94, -1.172, ... ], [ 0.044, -0.652, -0.648, -1.484, -0.684, -1.792, -1.684, 1.884, ... ], ... ],
    	[ [ -0.228, 1.908, -1.748, 0.052, 1.624, 1.34, 0.8, -0.812, ... ], [ 0.488, 1.86, -0.24, 1.216, 0.764, 1.976, -1.996, 1.496, ... ], [ -0.648, -1.26, 0.632, -1.964, -0.016, 1.044, 0.06, -0.792, ... ], [ 1.228, -1.368, -0.42, -1.884, 0.296, -1.46, -0.912, -0.796, ... ], [ -1.18, -1.96, 0.292, -0.32, -1.164, 0.952, 0.228, -1.744, ... ], [ -1.824, 0.572, -0.564, 1.536, 0.532, -1.052, 0.912, 0.0, ... ], [ -0.232, 1.236, -1.832, 1.176, -1.8, 1.42, 1.4, -1.52, ... ], [ 0.056, 0.388, 0.372, 0.628, 0.64, -1.872, -1.38

...skipping 5535 bytes...

    956, 1.764, 0.128, -0.568, -1.868, -1.748, 1.708, -0.232, ... ], [ 0.436, -0.156, -1.552, 1.08, -0.888, 1.66, -1.08, -0.752, ... ], [ 0.248, -1.796, -0.788, 1.748, -0.784, 1.452, 0.032, -1.228, ... ], [ -0.184, -0.708, 0.848, -0.552, 1.848, 0.136, -0.268, 1.416, ... ], [ 0.352, -0.652, -1.76, -1.964, -1.368, -1.26, 1.784, 0.552, ... ], [ -1.096, -1.656, -0.32, 1.684, -1.092, 0.48, -1.388, 1.284, ... ], [ -0.744, -0.952, -1.704, -1.924, 0.032, 1.504, -0.528, -1.036, ... ], [ -0.248, 1.688, 0.3, -1.456, -0.516, 0.2, 0.552, -0.504, ... ], ... ],
    	[ [ -1.392, -1.012, 1.96, -0.024, 0.524, 0.748, -1.808, -1.46, ... ], [ 1.456, 1.608, 0.352, 1.836, 1.448, 0.168, 0.14, 0.444, ... ], [ 1.756, -1.932, 1.224, -0.12, -0.624, -0.684, 0.5, -0.772, ... ], [ 1.004, -0.628, 1.968, -0.036, -0.904, 0.384, -0.344, 1.864, ... ], [ 0.716, 1.428, 0.948, -1.576, 1.392, -0.94, 1.692, -1.08, ... ], [ -1.524, -0.412, 0.992, -1.044, 0.136, -0.92, 0.296, 1.916, ... ], [ -0.18, 0.164, -0.88, -0.436, 0.86, -0.788, -0.756, 1.992, ... ], [ -0.972, -0.496, 0.2, 0.78, -1.136, 1.064, -1.276, -0.284, ... ], ... ],
    	[ [ -1.608, 1.864, -0.124, 1.728, 1.356, 1.692, 1.328, -1.744, ... ], [ -0.172, 1.032, -1.816, 0.084, -0.82, -0.608, 0.108, 1.036, ... ], [ -1.936, -1.956, 0.236, -0.528, 0.456, 0.676, 0.368, 0.92, ... ], [ -1.512, -1.28, -0.16, 1.78, -0.372, -1.412, -0.848, -1.852, ... ], [ 0.088, -1.788, -1.56, 0.656, 0.928, 0.208, 0.024, -1.1, ... ], [ 1.144, 1.74, 0.476, 1.452, -0.336, 1.596, -1.076, 1.684, ... ], [ 1.916, -1.636, 0.388, -0.94, -1.62, 0.476, -0.288, -0.1, ... ], [ 1.144, -1.092, 0.916, 0.42, 0.312, -0.904, 1.492, -0.552, ... ], ... ],
    	[ [ 0.712, 0.42, 0.032, 1.984, -1.748, -1.532, 1.884, -0.124, ... ], [ -1.388, 0.232, -1.204, 1.74, -0.896, 0.46, 1.168, -1.252, ... ], [ 0.776, -1.456, 1.052, -0.376, 0.872, -1.904, 0.692, -0.808, ... ], [ 0.432, 1.62, -1.388, -1.488, -0.8, 1.612, 1.228, -1.516, ... ], [ -1.644, -1.604, 1.072, 1.616, 1.372, 0.56, -0.3, 0.844, ... ], [ -0.616, 1.468, -1.244, -1.84, -0.352, 0.228, 1.352, 0.848, ... ], [ 0.392, -0.356, -1.836, -0.228, 1.384, 1.244, -0.188, 1.76, ... ], [ 0.756, 0.876, 1.444, 0.192, -0.868, 1.376, 1.192, 0.984, ... ], ... ],
    	[ [ -0.464, 0.572, 0.752, -1.352, -0.808, 0.952, 1.404, -1.168, ... ], [ 0.648, 1.34, 1.264, 1.092, 1.68, 0.008, -1.096, -1.616, ... ], [ 0.096, -0.104, -0.464, -1.144, -1.452, -0.468, 1.008, 1.496, ... ], [ 0.92, 1.292, 1.324, 1.14, 0.828, -0.392, -1.956, 1.94, ... ], [ -2.0, -1.54, 0.148, 0.356, -0.68, 1.028, -1.348, -1.748, ... ], [ 1.7, -0.516, -1.68, -1.868, -1.456, -1.988, -0.732, 0.668, ... ], [ -0.744, -1.86, -1.06, 1.704, -1.44, -1.424, 0.792, -0.108, ... ], [ -1.936, 0.66, -0.38, -1.528, -0.548, 0.94, -0.616, -0.88, ... ], ... ],
    	[ [ -0.212, 0.708, -1.56, 0.54, 1.252, -0.424, -0.84, 0.668, ... ], [ 1.036, 0.084, 0.212, 0.296, 0.452, -1.92, -1.74, 0.368, ... ], [ -0.376, 1.572, -1.668, 0.612, 1.62, -0.088, -1.444, -0.4, ... ], [ -0.772, -0.684, 0.86, 1.844, 0.28, -0.68, -0.528, -0.676, ... ], [ 1.868, 1.432, 0.008, 1.46, 0.372, 1.76, 1.424, 0.976, ... ], [ 1.652, -1.46, -0.008, -1.288, 1.912, -0.004, 1.288, -0.36, ... ], [ 0.676, -0.888, 1.208, -0.152, -1.604, 1.312, -1.148, 1.128, ... ], [ 1.492, 1.88, 1.176, 0.424, 0.828, -1.208, 1.412, 1.144, ... ], ... ],
    	[ [ 0.32, -1.788, -0.636, 0.968, 0.296, -1.012, -0.552, 1.004, ... ], [ -0.808, -1.376, 1.524, -0.252, -1.52, 1.468, -1.48, -0.928, ... ], [ -0.952, -0.088, 0.384, 1.46, -1.208, -0.576, 0.108, 1.164, ... ], [ 1.676, 0.964, 0.116, 1.512, -0.86, -0.64, -1.624, 1.252, ... ], [ 1.964, 1.216, -0.66, 0.46, -1.696, -0.852, -1.288, -1.152, ... ], [ 1.404, -1.4, -0.86, -0.66, 0.428, 0.996, -0.128, 0.832, ... ], [ 1.076, 1.76, 1.424, -1.068, -0.556, 0.448, -0.06, 1.108, ... ], [ -0.52, 0.988, -1.76, 0.1, -1.14, -0.712, 0.32, -1.092, ... ], ... ],
    	...
    ]
    [
    	[ [ -1.916, -0.324, 0.4, 0.828, 1.072, -1.436, 0.588, 0.856, ... ], [ -0.304, 1.468, 0.116, 0.384, 1.016, -0.792, -0.156, -0.336, ... ], [ 1.736, 0.732, 1.232, -0.68, 0.808, 1.192, -1.884, -1.488, ... ], [ -0.396, 1.304, 0.036, 0.06, -1.428, -0.984, 0.436, -1.248, ... ], [ -1.82, 1.916, 0.052, 0.736, -1.172, -0.928, -0.66, 1.172, ... ], [ 0.008, 1.248, 1.312, -0.344, -1.324, -1.428, -1.356, -1.836, ... ], [ 0.528, 1.744, -1.272, 1.5, 1.764, -0.64, -0.768, 1.648, ... ], [ -1.376, 0.788, -0.316, -1.14, 1.972, 1.132, -0.144, -0.152, ... ], ... ],
    	[ [ 0.568, 1.848, -1.124, -0.868, 0.172, -0.856, -0.468, 0.436, ... ], [ 0.156, 1.596, 1.56, -0.432, 1.716, 1.44, 1.268, -1.224, ... ], [ 0.252, -0.66, -0.872, -0.976, -1.104, 1.568, 1.4, 0.02, ... ], [ 1.956, -1.912, -0.88, -1.92, 1.644, 0.752, 0.712, 0.64, ... ], [ 1.652, -1.732, -0.736, -0.244, 0.84, -1.856, -0.896, -0.168, ... ], [ 0.044, -0.66, -1.812, -1.08, -1.78, -0.092, 1.684, 1.316, ... ], [ -0.204, -0.944, -0.372, 1.084, 1.628, 1.8, 1.248, 1.324, ... ], [ -1.996, 0.9, 1.76, 1.216, 0.336, 0.28, 1.096, -0.392, ... ], ... ],
    	[ [ -1.368, -1.68, 0.2, 1.888, -1.144, 0.28, -1.936, 1.56, ... ], [ 1.188, 0.268, -0.08, -1.228, -1.28, -0.688, -1.968, 1.316, ... ], [ 0.148, -0.936, 1.808, -0.104, 1.992, -1.132, 1.008, -1.636, ... ], [ 0.824, -0.792, 0.88, 1.092, 0.172, 0.344, -1.156, -0.368, ... ], [ -0.828, 0.076, 1.816, -1.836, -0.832, 0.66, 0.832, 1.252, ... ], [ -1.732, 0.612, 0.888, -1.52, 0.896, 1.504, -1.576, -1.628, ... ], [ -1.3, -1.224, -1.636, -0.312, -1.68, 1.616, -0.644, -0.708, ... ], [ 1.936, 0.52, 1.324, 1.612, 1.688, 1.272, -0.38, -0.368, ... ], ... ],
    	[ [ 1.9, -1.588, 1.616, -0.684, 0.66, 0.308, -0.924, 0.332, ... ], [ -1.896, -0.412, -1.492, -1.336, -1.316, -0.448, -0.028, -0.3, ... ], [ 1.848, 0.9, -1.384, -0.016, 1.22, 1.404, -1.312, 0.772, ... ], [ -1.368, 0.584, -1.9, 0.504, 1.136, -1.364, -1.892, 1.244, ... ], [ -1.024, -1.012, -0.612, 1.116, 1.448, 0.996, -0.24, 0.144, ... ], [ 1.008, -0.972, -1.392, -0.244, -1.148, -0.452, -0.744, 0.084, ... ], [ -1.328, 0.836, 0.288, 1.736, 0.816, 0.2, -0.02, -1.804, ... ], [ 1.356, 0.992, 1.264, 0.68, 0.264, 1.128, -0.236, -1.86, ... ], ... ],
    	[ [ -0.448, 1.868, 1.932, -0.772, 0.924, 0.768, -0.048, -1.988, ... ], [ -1.82, 0.072, -0.308, 1.216, 1.968, -1.636, -0.516, 1.436, ... ], [ -0.484, 1.056, -1.32, 1.092, -1.164, 1.736, -1.544, 0.224, ... ], [ -1.612, 0.704, 1.804, -1.0, -0.04, -1.452, -1.728, 1.504, ... ], [ -0.256, 1.48, 0.288, 1.0, 0.38, -0.844, -0.68, 0.308, ... ], [ 1.548, -0.664, -1.168, 1.252, -0.24, 1.94, 1.668, -1.312, ... ], [ -0.992, 0.272, -1.352, 0.38, 0.376, 0.672, -0.372, 0.98, ... ], [ 0.192, -1.22, -1.416, 0.26, -0.164, -0.972, 1.368, -1.728, ... ], ... ],
    	[ [ 0.42, 0.004, 0.104, -0.304, -1.364, 1.868, 0.16, -1.464, ... ], [ 1.224, 1.92, -0.916, -1.896, 1.26, 0.5, 1.68, -1.008, ... ], [ -1.628, -0.444, -1.076, -1.584, 0.768, -1.92, -0.872, 0.04, ... ], [ -1.764, 1.58, 0.196, -0.4, 1.28, 1.008, 0.644, -1.548, ... ], [ -0.108, -1.312, 1.0, -0.276, -0.484, 1.308, 0.932, -0.124, ... ], [ -1.004, -1.276, 0.736, 1.308, -0.52, -1.392, -1.028, 0.184, ... ], [ 1.752, -1.58, -0.992, -0.112, 1.372, -1.972, -0.244, -1.624, ... ], [ -1.1, -1.82, 0.984, 1.432, -1.1, -0.176, -0.996, 0.488, ... ], ... ],
    	[ [ 0.836, -1.388, 0.912, 0.012, 1.836, -0.82, -1.004, -0.484, ... ], [ -0.924, -1.6, -1.22, -0.984, 0.452, -1.828, 1.46, -0.932, ... ], [ 1.668, 0.268, -1.048, 0.48, 0.516, -1.508, 1.22, -1.204, ... ], [ 0.732, 1.196, -1.992, 0.192, 1.02, 1.64, -1.368, -1.452, ... ], [ 0.32, 0.68, 1.432, 1.496, 1.992, 0.772, 0.748, 1.404, ... ], [ 0.144, -1.008, -0.316, -0.168, 1.544, 1.276, 0.72, 0.812, ... ], [ -1.976, 1.12, -0.664, -1.816, 1.912, 0.276, 1.788, -1.776, ... ], [ -1.076, 1.004, -0.476, 1.372, -0.572, 0.616, -0.58, -1.336, ... ], ... ],
    	[ [ -0.436, -0.52, -0.756, -1.744, -1.232, -1.084, -1.808, 1.44, ... ], [ 1.5, 1.356, 0.8, 1.592, -0.9, -0.288, 1.372, -1.468, ... ], [ 1.892, -1.276, -0.768, 1.628, 0.032, 0.012, 1.34, 1.152, ... ], [ 0.5, -0.492, 0.908, -0.696, -0.544, 0.644, 1.524, 1.576, ... ], [ -1.32, -0.732, -0.82, 0.716, 0.24, -0.564, 1.232, 0.604, ... ], [ -0.128, -1.644, -1.392, -1.452, 1.28, -1.948, 0.884, -1.308, ... ], [ 1.844, 1.148, -1.08, 1.264, -1.936, -0.292, -0.324, 1.948, ... ], [ 1.4, -1.224, -0.02, -1.76, 1.748, 0.132, 1.66, -1.7, ... ], ... ],
    	...
    ]

Gradient Descent

First, we train using the basic gradient descent method with weak line search conditions.
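The ArmijoWolfeSearch accepts a step size t along the search direction d when two tests hold: the Armijo sufficient-decrease test, f(x + t·d) ≤ f(x) + c1·t·f′(0), and the weak Wolfe curvature test, f′(t) ≥ c2·f′(0). In the log below, a step that satisfies Armijo but not the curvature test is too small and gets tagged "WOLFE (weak)" before the search enlarges it; once both tests hold the step is accepted ("END"). A minimal sketch under those assumptions (phi/dphi are hypothetical samplers of the loss and its directional derivative along d; c1 and c2 are conventional defaults, not values read from the source):

    import java.util.function.DoubleUnaryOperator;

    // Weak Wolfe line search sketch. phi(t) is the loss at x + t*d and dphi(t)
    // its directional derivative; both are hypothetical samplers of the trainable.
    static double weakWolfe(DoubleUnaryOperator phi, DoubleUnaryOperator dphi) {
      double c1 = 1e-4, c2 = 0.9;                  // conventional constants
      double f0 = phi.applyAsDouble(0.0);
      double df0 = dphi.applyAsDouble(0.0);        // must be negative (descent direction)
      double t = 1.0;
      for (int i = 0; i < 50; i++) {
        boolean armijo = phi.applyAsDouble(t) <= f0 + c1 * t * df0;
        boolean curvature = dphi.applyAsDouble(t) >= c2 * df0;
        if (armijo && curvature) break;            // both hold: accept ("END")
        t = armijo ? t * 2.0                       // too small: grow ("WOLFE (weak)")
                   : t / 2.0;                      // Armijo failed: too large, shrink
      }
      return t;
    }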

TrainingTester.java:480 executed in 42.37 seconds (6.065 gc):

    IterativeTrainer iterativeTrainer = new IterativeTrainer(trainable.addRef());
    try {
      iterativeTrainer.setLineSearchFactory(label -> new ArmijoWolfeSearch());
      iterativeTrainer.setOrientation(new GradientDescent());
      iterativeTrainer.setMonitor(TrainingTester.getMonitor(history));
      iterativeTrainer.setTimeout(30, TimeUnit.SECONDS);
      iterativeTrainer.setMaxIterations(250);
      iterativeTrainer.setTerminateThreshold(0);
      return iterativeTrainer.run();
    } finally {
      iterativeTrainer.freeRef();
    }
Logging
Reset training subject: 608840184221
Reset training subject: 610507414417
Constructing line search parameters: GD
th(0)=0.31693414565;dx=-1.3156303124687387E-7
New Minimum: 0.31693414565 > 0.31693386267619117
WOLFE (weak): th(2.154434690031884)=0.31693386267619117; dx=-1.3134477054529872E-7 evalInputDelta=2.8297380882946754E-7
New Minimum: 0.31693386267619117 > 0.31693357970254
WOLFE (weak): th(4.308869380063768)=0.31693357970254; dx=-1.3134469738356545E-7 evalInputDelta=5.659474600072656E-7
New Minimum: 0.31693357970254 > 0.31693244780951135
WOLFE (weak): th(12.926608140191302)=0.31693244780951135; dx=-1.3134440473663485E-7 evalInputDelta=1.697840488645852E-6
New Minimum: 0.31693244780951135 > 0.31692735432209185
WOLFE (weak): th(51.70643256076521)=0.31692735432209185; dx=-1.3134308782544688E-7 evalInputDelta=6.791327908151068E-6
New Minimum: 0.31692735432209185 > 0.316900189918363
WOLFE (weak): th(258.53216280382605)=0.316900189918363; dx=-1.3133606429911192E-7 evalInputDelta=3.395573163700316E-5
New Minimum: 0.316900189918363 > 0.31673044530656747
WOLFE (weak): th(1551.1929768229563)=0.31673044530656747; dx=-1.312921672595173E-7 evalInputDelta=2.0370034343253351E-4
New Minimum: 0.31673044530656747 > 0.3155100608440427
WOLFE (weak): th(10858.350837760694)=0.3155100608440427; dx=-1.3091620745180413E-7 evalInputDelta=0.001424084805957293
New Minimum: 0.3155100608440427 > 0.3057295326412294
WOLFE (weak): th(86866.80670208555)=0.3057295326412294; dx=-1.2644451186133647E-7 evalInputDelta=0.011204613008770625
New Minimum: 0.3057295326412294 > 0.23077524214427925
END: th(781801.26031877)=0.23077524214427925; dx=-9.081368023258879E-8 evalInputDelta=0.08615890350572075
Fitness changed from 0.31693414565 to 0.23077524214427925
Iteration 1 complete. Error: 0.23077524214427925 Total: 42.3525; Orientation: 2.1473; Line Search: 34.8902
Final threshold in iteration 1: 0.23077524214427925 (> 0.0) after 42.354s (< 30.000s)

Returns

    0.23077524214427925

This training run resulted in the following configuration:

TrainingTester.java:610 executed in 0.00 seconds (0.000 gc):

    RefList<double[]> state = network.state();
    assert state != null;
    String description = state.stream().map(RefArrays::toString).reduce((a, b) -> a + "\n" + b)
        .orElse("");
    state.freeRef();
    return description;

Returns

    

And regressed input:

TrainingTester.java:622 executed in 0.01 seconds (0.000 gc):

    return RefArrays.stream(RefUtil.addRef(data)).flatMap(x -> {
      return RefArrays.stream(x);
    }).limit(1).map(x -> {
      String temp_18_0015 = x.prettyPrint();
      x.freeRef();
      return temp_18_0015;
    }).reduce((a, b) -> a + "\n" + b).orElse("");

Returns

    [
    	[ [ 0.044, -0.236, -0.1, 0.756, 0.58, 1.5452639335890868, -0.816, 1.8569099369840614, ... ], [ -1.084, -0.788, -1.76, -1.088, -0.216, 1.348, -1.688, -0.3, ... ], [ 1.044, 1.5786114154168385, -1.972, -0.148, 1.6047890087198575, -0.18, 0.8001009718870055, 0.716, ... ], [ -1.628, -0.26, -0.732, 1.036, -0.424, -0.932, 0.548, -1.952, ... ], [ 1.7352483713056817, -0.064, 1.468, -0.476, -1.948, 0.9969635506569285, 0.4, 1.22, ... ], [ 0.384, 1.504, 1.092, 1.5750800848273392, 1.844, 0.88, -1.772, 1.716, ... ], [ 1.116, 1.192, -1.636, -1.568, 1.7606843141029813, -1.688, 1.5710090475061316, 1.7110164929658729, ... ], [ -1.068, -1.224, -0.88, 0.088, -1.5, 1.7587061318451154, 0.132, 0.6, ... ], ... ],
    	[ [ -0.348, -0.072, 0.488, 0.412, 0.444, 0.688, -0.356, -0.916, ... ], [ -1.844, -1.196, 1.922749767329383, -0.768, -1.516, 1.5271810567143447, 0.624, -1.904, ... ], [ 1.5119763208929309, 0.084, 1.828, 1.5445933204045752, 0.468, -0.1, -0.396, -1.704, ... ], [ 0.16, 0.432, 1.296, -1.668, 1.14, -0.036, 0.9562356010696793, 1.3698635094523908, ... ], [ -1.592, -1.264, 1.6654322200674292, -0.768, -1.812, 0.0, -0.344, 1.064, ... ], [ 1.5496241856528408, 1.6175406374141759, -0.144, -0.452, 1.68, 0.736, 1.096, 0.304, ... ], [ -1.708, -0.436, 1.3309554338332648, -0.152, 0.544, 0.18, -1.112, -0.884, ... ], [ 0.456, 0.14, 0.856, 0.064, -1.188, -1.504, 1.450045496849203, 1.875693361609113, ... ], ... ],
    	[ [ 1.0242019437740109, -0.356, -1.516, 0.476, -0.004, 0.9630900630159386, -0.204, 0.288, ... ], [ 0.524, -0.656, -1.496, -1.028, -0.996, -1.708, 1.4703795370770298, -0.5, ... ], [ -1.8, 0.452, 1.092, -0.772, -1.584, -1.748, -1.116, -0.792, ... ], [ -1.384, 0.24, 1.6913767450296269, -1.308, 0.9230763620974687, 0.452, -0.4, 1.75764786475991, ... ], [ -1.08, 0.912, 0.888, 1.132, -0.868, -0.356, 0.212, -0.908, ... ], [ -1.928, 1.436, 1.6649336160911308, 0.056, 1.20211281144054, 1.518163892384549, -0.476, 0.284, ... ], [ 1.42, -1.688, 1.432, -1.26, -0.136, -0.308, 0.052, -1.0, ... ], [ -0.34, 1.06, -0.26, -0.448, -1.852, 1.42, -0.64, 0.396, ... ], ... ],
    	[ [ 0.732, 1.512, -0.968, 1.8860118395535346, 0.7194166577840241, -0.704, 1.596, 1.644, ... ], [ -1.748, 1.8481819873968124, 0.648, -1.62, 0.024, 0.7588407610277892, 0.424, 0.016, ... ], [ -1.396, -1.704, 1.657317547261954, 0.124, 0.544, -0.584, -0.136, 1.8943658361585598, ... ], [ 1.8201620310196136, 1.4472383931170822, 0.904, 1.8311710785257453, 0.256, -0.816, 0.816, -1.484, ... ], [ -1.3, 0.768, -0.34, 0.328, -1.656, 0.452, -1.74, 0.048, ... ], [ 1.5359426635972624, 0.548, 1.068, 0.344, -1.692, 0.356, 0.556, 1.228, ... ], [ -0.736, -0.74, -1.296, 0.36, 0.448, -1.016, 1.4270264711544722, 1.7365796194861054, ... ], [ -0.52, -0.524, 0.648, -1.184, 1.469556871061504, -1.908, -0.904, -0.144, ... ], ... ],
    	[ [ 1.9304331507498969, 0.882951711103394, 1.8, -1.392, -0.09072981095218448, 0.528, -0.144, -1.292, ... ], [ -1.388, -0.304, 1.84, -0.704, -0.512, 0.192, -1.036, -1.752, ... ], [ 1.586921776537596, 0.496, 0.284, -1.204, -0.796, 0.744, 0.796, 1.3695095128473656, ... ], [ -1.16, 1.24, 0.592, 0.884, -0.476, -1.552, -0.488, -0.844, ... ], [ 1.3891255816765422, 1.648, -1.2, 0.88, 1.848, 1.5854322200674293, -0.152, 1.468, ... ], [ 0.504, 0.972, -1.44, -0.432, -1.692, -0.188, -1.564, 1.9355013960237015, ... ], [ -1.836, -1.68, 1.5011255816765423, 1.5959526417858616, -0.836, 1.32, 1.196, 1.876, ... ], [ 0.784, -1.8, -0.688, -0.884, -1.932, 1.248, -1.468, -1.892, ... ], ... ],
    	[ [ 1.24, -1.4, -1.424, 1.0500218177421339, -0.856, 0.9050782234624039, -1.068, -0.544, ... ], [ 0.572, -0.704, 1.73460515995811, -1.48, -0.344, -1.78, -0.044, -0.856, ... ], [ 0.844, -0.304, -0.272, 0.18, -0.924, -0.696, 1.7212302762934184, -1.564, ... ], [ 1.296, 1.8101028332519409, -0.628, -1.336, 1.772209389233752, 0.644, 0.812, -1.68, ... ], [ -1.788, 1.596, 1.6840274018369397, 1.3638753490059252, 0.04, 1.472, -0.352, 1.692555940379036, ... ], [ -1.264, -0.544, 0.996, -1.42, 1.096, 1.044, 1.6992084585512843, 0.568, ... ], [ 0.844, -1.136, -1.612, -0.732, 1.7146014372282392, 0.092, -0.008, 1.348, ... ], [ -1.612, 1.5195450315079693, 1.012, 0.892, 1.348, -0.948, -0.648, 1.5477943334961184, ... ], ... ],
    	[ [ -1.432, -0.708, 0.5605422394605662, 0.44, -0.908, -1.128, 1.312, -0.504, ... ], [ -1.196, -1.964, -1.732, -1.552, 1.7, 1.691444059620964, -1.148, -1.824, ... ], [ -1.916, 1.7330919243808738, -1.692, -0.56, -1.416, 0.284, 0.364, -1.936, ... ], [ -1.644, -0.636, -0.524, -1.424, 1.628, -0.208, -1.044, -1.144, ... ], [ -0.064, -0.02, -1.872, -1.984, -1.148, -0.752, 1.332, -1.78, ... ], [ 1.9437843553075191, -1.38, 1.164, 0.904, -1.264, -0.008, 0.368, -0.204, ... ], [ 0.864, -0.808, 1.7668071037321207, -0.632, 1.208, -1.096, 0.044, -1.152, ... ], [ -1.32, -1.928, -1.756, -1.916, 0.4240299345657979, -1.468, -0.988, 1.377752559376786, ... ], ... ],
    	[ [ -1.936, -0.808, -0.408, 0.9473331095453592, -1.304, -1.044, 1.3617961948610537, 1.5145677799325707, ... ], [ -0.7569173824438026, 1.24, 0.172, -1.264, 0.456, -0.888, -0.96, 1.24, ... ], [ -1.204, 0.008, -1.528, -0.232, 1.8122866820136883, 0.548, -1.704, -1.96, ... ], [ 1.9758753490059253, 1.4316123460993064, 1.79740854096036, -0.656, 1.292, 1.576613276781774, 0.496, 0.948, ... ], [ -1.388, -1.644, 0.324, 1.6715213524009003, 0.768, -0.232, 1.47483078283919, 1.4888152205557847, ... ], [ 0.372, 1.329782493942584, -1.912, -1.268, -1.92, 1.068, -1.512, -0.692, ... ], [ -1.184, -1.408, -0.164, -0.124, 1.7723166165794861, 1.404, 1.5029217765375962, -0.336, ... ], [ -1.272, -1.924, 1.028, 0.984, -0.032, -0.172, 0.572, -0.016, ... ], ... ],
    	...
    ]

To produce the following output:

TrainingTester.java:633 executed in 0.20 seconds (0.000 gc):

    Result[] array = ConstantResult.batchResultArray(pop(RefUtil.addRef(data)));
    @Nullable
    Result eval = layer.eval(array);
    assert eval != null;
    TensorList tensorList = Result.getData(eval);
    String temp_18_0016 = tensorList.stream().limit(1).map(x -> {
      String temp_18_0017 = x.prettyPrint();
      x.freeRef();
      return temp_18_0017;
    }).reduce((a, b) -> a + "\n" + b).orElse("");
    tensorList.freeRef();
    return temp_18_0016;

Returns

    [
    	[ [ 1.5119763208929309, 1.5786114154168385, 1.922749767329383, 1.5445933204045752, 1.6047890087198575, 1.5452639335890868, 0.8001009718870055, 1.8569099369840614, ... ], [ 1.7352483713056817, 1.5786114154168385, 1.922749767329383, 1.5445933204045752, 1.6047890087198575, 1.5271810567143447, 0.9562356010696793, 1.3698635094523908, ... ], [ 1.7352483713056817, 1.6175406374141759, 1.6654322200674292, 1.5750800848273392, 1.844, 0.9969635506569285, 1.5710090475061316, 1.716, ... ], [ 1.5496241856528408, 1.6175406374141759, 1.3309554338332648, 1.5750800848273392, 1.844, 1.7587061318451154, 1.5710090475061316, 1.875693361609113, ... ], [ 1.7138971667480591, 1.8895468928729044, 1.4994976732938308, 1.4990800848273391, 1.874045496849203, 1.8026251163353084, 1.450045496849203, 1.875693361609113, ... ], [ 1.824, 1.8895468928729044, 1.4994976732938308, 1.4990800848273391, 1.874045496849203, 1.8026251163353084, 1.9880909936984061, 1.8876597043134444, ... ], [ 1.824, 1.641115603487943, 1.4689535724683291, 1.43811281144054, 0.9475313305894993, 0.8615768274387025, 1.9880909936984061, 1.8876597043134444, ... ], [ 1.6315687106150383, 1.641115603487943, 1.5278279907917869, 0.9605659185676355, 1.0900654532264016, 1.118089132333471, 1.4720573364027376, 1.7172838899662854, ... ], ... ],
    	[ [ 1.0242019437740109, 1.8481819873968124, 1.657317547261954, 1.8860118395535346, 0.7194166577840241, 0.9630900630159386, 1.596, 1.8943658361585598, ... ], [ 1.8201620310196136, 1.8481819873968124, 1.6913767450296269, 1.8311710785257453, 0.9230763620974687, 0.7588407610277892, 1.4703795370770298, 1.8943658361585598, ... ], [ 1.8201620310196136, 1.4472383931170822, 1.6913767450296269, 1.8311710785257453, 1.20211281144054, 1.518163892384549, 1.4270264711544722, 1.75764786475991, ... ], [ 1.5359426635972624, 1.575865370817326, 1.6649336160911308, 1.0489934852227265, 1.469556871061504, 1.518163892384549, 1.5815468928729044, 1.7365796194861054, ... ], [ 1.916876279688393, 1.575865370817326, 1.5479663427043315, 1.0489934852227265, 1.956, 1.6573075690733545, 1.5815468928729044, 1.4814696000929681, ... ], [ 1.9500118395535346, 1.9431137421230076, 1.4754203805138946, 0.6270701066387399, 1.956, 1.7960810155098068, 1.4363402956865554, 1.3971355598651416, ... ], [ 1.9500118395535346, 1.9431137421230076, 1.721580550168573, 0.6270701066387399, 1.7059781822578661, 1.7960810155098068, 1.4363402956865554, 0.7124175884664916, ... ], [ 1.513250232670617, 1.4214322200674292, 1.721580550168573, 0.5717469752819802, 1.2705677799325708, 1.1939981386350649, 1.6298635094523908, 1.2960336572956686, ... ], ... ],
    	[ [ 1.9304331507498969, 0.882951711103394, 1.84, 1.0500218177421339, -0.09072981095218448, 0.9050782234624039, 1.7212302762934184, 1.3695095128473656, ... ], [ 1.586921776537596, 1.8101028332519409, 1.84, 1.3638753490059252, 1.848, 1.5854322200674293, 1.7212302762934184, 1.692555940379036, ... ], [ 1.3891255816765422, 1.8101028332519409, 1.6840274018369397, 1.5959526417858616, 1.848, 1.5854322200674293, 1.6992084585512843, 1.9355013960237015, ... ], [ 1.1407479059644479, 1.5195450315079693, 1.5011255816765423, 1.5959526417858616, 1.7146014372282392, 1.8230701066387398, 1.6992084585512843, 1.9355013960237015, ... ], [ 1.7176005065457716, 1.7679763208929309, 1.7954104023252955, 1.1087379277758487, 1.4372028744564787, 1.84, 1.6868881192419276, 1.5477943334961184, ... ], [ 1.7176005065457716, 1.7679763208929309, 1.7954104023252955, 1.668516027624639, 1.3259644813393963, 1.9586824527380462, 1.6868881192419276, 1.3413075690733545, ... ], [ 1.1914066795954248, 1.655272050412751, 1.0725085821648979, 1.668516027624639, 1.3377762384838552, 1.9586824527380462, 1.9219445249621976, 1.5203876539006937, ... ], [ 1.4658971667480591, 1.724023679107069, 1.7958516698988563, 1.824, 1.3377762384838552, 1.866045496849203, 1.9219445249621976, 1.7475687106150384, ... ], ... ],
    	[ [ -0.7569173824438026, 1.7330919243808738, 0.5605422394605662, 0.9473331095453592, 1.8122866820136883, 1.691444059620964, 1.3617961948610537, 1.5145677799325707, ... ], [ 1.9758753490059253, 1.7330919243808738, 1.79740854096036, 1.6715213524009003, 1.8122866820136883, 1.691444059620964, 1.47483078283919, 1.4888152205557847, ... ], [ 1.9758753490059253, 1.4316123460993064, 1.79740854096036, 1.6715213524009003, 1.7723166165794861, 1.576613276781774, 1.5029217765375962, 1.4888152205557847, ... ], [ 1.9437843553075191, 1.329782493942584, 1.7668071037321207, 1.0682493019881494, 1.7723166165794861, 1.82211281144054, 1.7089772515753985, 1.377752559376786, ... ], [ 1.6248625787699231, 1.7988444837576598, 1.8835013960237015, 1.7235687106150384, 0.4240299345657979, 1.82211281144054, 1.8848189432856552, 1.377752559376786, ... ], [ 1.98164786475991, 1.7988444837576598, 1.8835013960237015, 1.731454037809563, 1.4068208046505906, 1.5982274842460154, 1.8848189432856552, 1.062884396512057, ... ], [ 1.98164786475991, 1.085940802232327, 1.4572265535635476, 1.731454037809563, 1.4068208046505906, 1.8016615656783799, 1.8349554338332645, 0.9013611827462217, ... ], [ 0.582075431415001, 1.0648725569585225, 1.1354066795954247, 1.575932685408663, 1.780808965097056, 1.8016615656783799, 1.6593967014068256, 1.7715450315079693, ... ], ... ],
    	[ [ 1.0521346291826739, 0.07676786234164637, 1.61435213524009, 1.8239663427043316, 1.756081015509807, 1.8736815220555785, 1.7445596631089066, 1.7941701478432777, ... ], [ 0.9351673557958748, 1.0985777581211702, 1.82540854096036, 1.8703084997558221, 1.5313967014068255, 1.8736815220555785, 1.7445596631089066, 1.7218398303453215, ... ], [ 1.716397632089293, 1.3145004653412338, 1.82540854096036, 1.8703084997558221, 1.6798180126031876, 1.55694359427973, 1.611477716916632, 1.7218398303453215, ... ], [ 1.716397632089293, 1.316, 1.5429080756191262, 1.503376745029627, 1.6798180126031876, 1.033236531752147, 0.9605222830833677, 1.245513235577236, ... ], [ 1.624344018416426, 1.7878853271945245, 0.938951711103394, 1.0648052423671854, 1.613250232670617, 1.8338298521567225, 1.7535468928729046, 1.245513235577236, ... ], [ 1.624344018416426, 1.7878853271945245, 1.48211281144054, 1.2782137833275455, 1.1504630853156947, 1.8662274842460154, 1.7535468928729046, 1.7163003829321581, ... ], [ 1.0597880780373898, 0.5184630853156947, 1.518456829856966, 1.8431137421230077, 1.1504630853156947, 1.8662274842460154, 0.7857014784327772, 1.7163003829321581, ... ], [ 1.7365123048947684, 1.8887516286943185, 1.518456829856966, 1.8431137421230077, 1.7084549684920307, 1.4273393650040878, 1.6772838899662854, 1.4045796194861053, ... ], ... ],
    	[ [ -0.5098472758050627, 1.4605496849203075, 1.078898097430527, 1.8955687106150383, 1.7956634270433152, 1.435798056225989, 0.7635849442623664, 1.2990663839088692, ... ], [ 0.7160835482386649, 1.8424767862341647, 1.6006606349959123, 1.4931592389722108, 1.207918984490193, 1.5834440596209638, 0.7635849442623664, 1.183191034902944, ... ], [ 0.7160835482386649, 1.8424767862341647, 1.624, 1.8006843141029814, 1.6068744183234578, 1.6326032985931744, 1.4386487954423777, 1.69012651235901, ... ], [ 1.818658773630977, 1.387386723218226, 1.624, 1.8006843141029814, 1.817991883176336, 1.6326032985931744, 1.4386487954423777, 1.69012651235901, ... ], [ 1.818658773630977, 1.387386723218226, 1.7110900630159385, 1.5604549684920308, 1.817991883176336, 1.559491417835102, 0.6369972079525971, 1.4045023267061691, ... ], [ 1.6904767862341645, 1.732411333007763, 1.7110900630159385, 0.7985877363097694, 1.6910364493430714, 1.1319052835717232, 1.692, 1.6675487542378398, ... ], [ 1.71505640572027, 1.984, 1.7131118807580723, 1.6448289214742549, 1.6752620722241516, 1.1319052835717232, 1.692, 1.6675487542378398, ... ], [ 1.71505640572027, 1.984, 1.7131118807580723, 1.7426587736309769, 1.4800099781885991, 1.8558180126031878, 1.6734322200674292, 1.2616915002441778, ... ], ... ],
    	[ [ 1.256081015509807, 1.5657725157539846, 1.0452065971863491, 1.7503284561330208, 0.7868033810022501, 0.8149180538077255, 1.635228414928483, 1.4713057077084193, ... ], [ 1.9180691759562722, 0.7125085821648978, 1.0452065971863491, 1.7503284561330208, 1.87177437711892, 1.601978182257866, 1.635228414928483, 1.7988407610277892, ... ], [ 1.9180691759562722, 1.6165895976747047, 0.9083303174979561, 1.4633057077084193, 1.87177437711892, 1.601978182257866, 1.7990900630159385, 1.8720137009184699, ... ], [ 1.875784355307519, 1.6165895976747047, 1.69670427048018, 1.7512483713056817, 1.797287612696156, 1.0194876951052316, 1.7990900630159385, 1.8720137009184699, ... ], [ 1.3858061730496531, 0.7403303174979561, 1.7002156446924808, 1.7512483713056817, 1.797287612696156, 1.2357943334961186, 1.13340854096036, 1.3781601696546784, ... ], [ 0.7383047770259517, 1.7390464275316708, 1.8135232137658355, 1.3339445249621975, 1.086193826950347, 1.3704531071270956, 1.5994876951052317, 1.3781601696546784, ... ], [ 1.1195550096965685, 1.7390464275316708, 1.8135232137658355, 1.3339445249621975, 0.9885796194861054, 1.3704531071270956, 1.5994876951052317, 0.9684175884664916, ... ], [ 1.1195550096965685, 1.5829990693175324, 1.6425515462852427, 0.5973848618532909, 1.035123720311607, 1.6379208458551284, 1.928081015509807, 1.5040772927799362, ... ], ... ],
    	[ [ 1.7313393650040876, 1.028771585071517, 1.0345777581211701, 0.7672920067899494, 0.6539881604464655, 1.5118853271945245, 0.2823484125102194, 1.379032726613201, ... ], [ 1.7313393650040876, 1.318975390210463, 1.4627260882223139, 1.4899308240437277, 1.2460417741193326, 1.5118853271945245, 1.3826687518195762, 1.6343047770259516, ... ], [ 1.9250919243808737, 1.3511374212300769, 1.8693649054760921, 1.6904094716428277, 1.2460417741193326, 1.6369672733867993, 1.3826687518195762, 1.6343047770259516, ... ], [ 1.9640909936984061, 1.3511374212300769, 1.8693649054760921, 1.7364786475991, 1.0663621134286894, 1.6369672733867993, 1.261236531752147, 1.0942474406232139, ... ], [ 1.9640909936984061, 1.2154640159981622, 1.2341028332519408, 1.7364786475991, 1.8202493019881494, 1.2780891323334709, 1.261236531752147, 1.3864904871526345, ... ], [ 1.5497488366469154, 1.8602493019881494, 1.2671910349029438, 1.716, 1.8202493019881494, 1.4499382695034688, 1.8273867232182261, 1.3864904871526345, ... ], [ 1.5583994934542282, 1.8602493019881494, 1.2671910349029438, 1.716, 1.7573412263690231, 1.4499382695034688, 1.8273867232182261, 1.14, ... ], [ 1.6316871061503844, 1.6860255404720046, 1.0213948400418902, 1.5190227484246015, 1.588185710126683, 1.2329872297639977, 1.8596933616091131, 1.40046494668063, ... ], ... ],
    	...
    ]

Conjugate Gradient Descent

Next, we use a conjugate gradient descent method, which converges the fastest for quadratic objectives.
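The QuadraticSearch used here brackets a minimum along the search direction and repeatedly models the loss inside the bracket as a parabola, jumping to the modeled minimum and tightening the bracket on each pass — consistent with the contracting "Right bracket at ..." values in the log below. One interpolation step, sketched with hypothetical derivative samples dphiA/dphiB at the bracket ends a and b:

    // Quadratic interpolation step: treat the directional derivative as linear
    // (i.e. the loss as quadratic) and solve for its zero. This is exact in one
    // step for a truly quadratic objective, hence the fast convergence noted above.
    // Precondition: dphiA < 0 <= dphiB, so a minimum lies inside [a, b].
    static double quadraticStep(double a, double dphiA, double b, double dphiB) {
      return b - dphiB * (b - a) / (dphiB - dphiA);  // secant step on the derivative
    }

Each accepted step replaces the right end of the bracket, so the derivative samples shrink toward zero as the search converges on the minimum.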

TrainingTester.java:452 executed in 104.24 seconds (7.083 gc):

    IterativeTrainer iterativeTrainer = new IterativeTrainer(trainable.addRef());
    try {
      iterativeTrainer.setLineSearchFactory(label -> new QuadraticSearch());
      iterativeTrainer.setOrientation(new GradientDescent());
      iterativeTrainer.setMonitor(TrainingTester.getMonitor(history));
      iterativeTrainer.setTimeout(30, TimeUnit.SECONDS);
      iterativeTrainer.setMaxIterations(250);
      iterativeTrainer.setTerminateThreshold(0);
      return iterativeTrainer.run();
    } finally {
      iterativeTrainer.freeRef();
    }
Logging
Reset training subject: 651752720238
Reset training subject: 653408677902
Constructing line search parameters: GD
F(0.0) = LineSearchPoint{point=PointSample{avg=0.31693414565}, derivative=-1.3156303124687387E-7}
F(1.0E-10) = LineSearchPoint{point=PointSample{avg=0.31693414565}, derivative=-1.3156310181835826E-7}, evalInputDelta = 0.0
New Minimum: 0.31693414565 > 0.3169341456499999
F(7.000000000000001E-10) = LineSearchPoint{point=PointSample{avg=0.3169341456499999}, derivative=-1.3140314921562387E-7}, evalInputDelta = -1.1102230246251565E-16
New Minimum: 0.3169341456499999 > 0.31693414564999933
F(4.900000000000001E-9) = LineSearchPoint{point=PointSample{avg=0.31693414564999933}, derivative=-1.3134565703828E-7}, evalInputDelta = -6.661338147750939E-16
New Minimum: 0.31693414564999933 > 0.31693414564999556
F(3.430000000000001E-8) = LineSearchPoint{point=PointSample{avg=0.31693414564999556}, derivative=-1.3134485227382601E-7}, evalInputDelta = -4.440892098500626E-15
New Minimum: 0.31693414564999556 > 0.31693414564996847
F(2.4010000000000004E-7) = LineSearchPoint{point=PointSample{avg=0.31693414564996847}, derivative=-1.3134484386327213E-7}, evalInputDelta = -3.1530333899354446E-14
New Minimum: 0.31693414564996847 > 0.3169341456497793
F(1.6807000000000003E-6) = LineSearchPoint{point=PointSample{avg=0.3169341456497793}, derivative=-1.3134484370697346E-7}, evalInputDelta = -2.2071233729548112E-13
New Minimum: 0.3169341456497793 > 0.31693414564845473
F(1.1764900000000001E-5) = LineSearchPoint{point=PointSample{avg=0.31693414564845473}, derivative=-1.3134484370663097E-7}, evalInputDelta = -1.5452639168245241E-12
New Minimum: 0.31693414564845473 > 0.3169341456391832
F(8.235430000000001E-5) = LineSearchPoint{point=PointSample{avg=0.3169341456391832}, derivative=-1.313448437042349E-7}, evalInputDelta = -1.0816791906620438E-11
New Minimum: 0.3169341456391832 > 0.31693414557428234
F(5.764801000000001E-4) = LineSearchPoint{point=PointSample{avg=0.31693414557428234}, derivative=-1.31344843687454E-7}, evalInputDelta = -7.571765436864553E-11
New Minimum: 0.31693414557428234 > 0.3169341451199762
F(0.004035360700000001) = LineSearchPoint{point=PointSample{avg=0.3169341451199762}, derivative=-1.3134484356999513E-7}, evalInputDelta = -5.300238026251236E-10
New Minimum: 0.3169341451199762 > 0.31693414193983327
F(0.028247524900000005) = LineSearchPoint{point=PointSample{avg=0.31693414193983327}, derivative=-1.3134484274778265E-7}, evalInputDelta = -3.7101667293981677E-9
New Minimum: 0.31693414193983327 > 0.31693411967883345
F(0.19773267430000002) = LineSearchPoint{point=PointSample{avg=0.31693411967883345}, derivative=-1.31344836992293E-7}, evalInputDelta = -2.5971166550675662E-8
New Minimum: 0.31693411967883345 > 0.3169339638518621
F(1.3841287201) = LineSearchPoint{point=PointSample{avg=0.3169339638518621}, derivative=-1.3134479670386401E-7}, evalInputDelta = -1.817981378771094E-7
New Minimum: 0.3169339638518621 > 0.3169328730644011
F(9.688901040700001) = LineSearchPoint{point=PointSample{avg=0.3169328730644011}, derivative=-1.3134451468485555E-7}, evalInputDelta = -1.2725855988993118E-6
New Minimum: 0.3169328730644011 > 0.31692523761775243
F(67.8223072849) = LineSearchPoint{point=PointSample{avg=0.31692523761775243}, derivative=-1.3134254055180558E-7}, evalInputDelta = -8.908032247567643E-6
New Minimum: 0.31692523761775243 > 0.3168717927045786
F(474.7561509943) = LineSearchPoint{point=PointSample{avg=0.3168717927045786}, derivative=-1.313287216204549E-7}, evalInputDelta = -6.235294542139558E-5
New Minimum: 0.3168717927045786 > 0.3164978357673053
F(3323.2930569601003) = LineSearchPoint{point=PointSample{avg=0.3164978357673053}, derivative=-1.3123198910099275E-7}, evalInputDelta = -4.363098826947187E-4
New Minimum: 0.3164978357673053 > 0.31389058053076246
F(23263.0513987207) = LineSearchPoint{point=PointSample{avg=0.31389058053076246}, derivative=-1.3019120273453553E-7}, evalInputDelta = -0.0030435651192375346
New Minimum: 0.31389058053076246 > 0.2962891173127587
F(162841.3597910449) = LineSearchPoint{point=PointSample{avg=0.2962891173127587}, derivative=-1.2209336245869936E-7}, evalInputDelta = -0.0206450283372413
New Minimum: 0.2962891173127587 > 0.2009940993424557
F(1139889.5185373144) = LineSearchPoint{point=PointSample{avg=0.2009940993424557}, derivative=-7.587919110583254E-8}, evalInputDelta = -0.11594004630754429
F(7979226.6297612) = LineSearchPoint{point=PointSample{avg=0.20770631983063445}, derivative=6.049836225200946E-8}, evalInputDelta = -0.10922782581936555
0.20770631983063445 <= 0.31693414565
New Minimum: 0.2009940993424557 > 0.10901710928746422
F(5465810.818576949) = LineSearchPoint{point=PointSample{avg=0.10901710928746422}, derivative=1.859604557112221E-8}, evalInputDelta = -0.20791703636253578
Right bracket at 5465810.818576949
New Minimum: 0.10901710928746422 > 0.10010034735298082
F(4788912.230630861) = LineSearchPoint{point=PointSample{avg=0.10010034735298082}, derivative=7.736224776202926E-9}, evalInputDelta = -0.21683379829701918
Right bracket at 4788912.230630861
New Minimum: 0.10010034735298082 > 0.09861673766827665
F(4522951.71865562) = LineSearchPoint{point=PointSample{avg=0.09861673766827665}, derivative=3.4124111675747183E-9}, evalInputDelta = -0.21831740798172333
Right bracket at 4522951.71865562
New Minimum: 0.09861673766827665 > 0.09833392627215432
F(4408603.725575884) = LineSearchPoint{point=PointSample{avg=0.09833392627215432}, derivative=1.5318819220906628E-9}, evalInputDelta = -0.21860021937784568
Right bracket at 4408603.725575884
New Minimum: 0.09833392627215432 > 0.09827743947114276
F(4357862.039150215) = LineSearchPoint{point=PointSample{avg=0.09827743947114276}, derivative=6.932151071418915E-10}, evalInputDelta = -0.21865670617885724
Right bracket at 4357862.039150215
Converged to right
Fitness changed from 0.31693414565 to 0.09827743947114276
Iteration 1 complete. Error: 0.09827743947114276 Total: 104.2367; Orientation: 2.2010; Line Search: 97.0756
Final threshold in iteration 1: 0.09827743947114276 (> 0.0) after 104.238s (< 30.000s)

Returns

    0.09827743947114276

This training run resulted in the following configuration:

TrainingTester.java:610 executed in 0.00 seconds (0.000 gc):

    RefList<double[]> state = network.state();
    assert state != null;
    String description = state.stream().map(RefArrays::toString).reduce((a, b) -> a + "\n" + b)
        .orElse("");
    state.freeRef();
    return description;

Returns

    

And regressed input:

TrainingTester.java:622 executed in 0.01 seconds (0.000 gc):

    return RefArrays.stream(RefUtil.addRef(data)).flatMap(x -> {
      return RefArrays.stream(x);
    }).limit(1).map(x -> {
      String temp_18_0015 = x.prettyPrint();
      x.freeRef();
      return temp_18_0015;
    }).reduce((a, b) -> a + "\n" + b).orElse("");

Returns

    [
    	[ [ 0.044, -0.236, -0.1, 0.756, 0.58, 1.002149702804061, -0.816, 1.6781068980424894, ... ], [ -1.084, -0.788, -1.76, -1.088, -0.216, 1.348, -1.688, -0.3, ... ], [ 1.044, 2.0479693926384654, -1.972, -0.148, 0.5654963448719688, -0.18, 1.0750106442596727, 0.716, ... ], [ -1.628, -0.26, -0.732, 1.036, -0.424, -0.932, 0.548, -1.952, ... ], [ 2.0794442212682083, -0.064, 1.468, -0.476, -1.948, 1.4953770217065614, 0.4, 1.22, ... ], [ 0.384, 1.504, 1.092, 1.461093147502087, 1.844, 0.88, -1.772, 1.716, ... ], [ 1.116, 1.192, -1.636, -1.568, 1.2332153492253433, -1.688, 2.060482366608686, 0.6270230693825911, ... ], [ -1.068, -1.224, -0.88, 0.088, -1.5, 1.4234504338296672, 0.132, 0.6, ... ], ... ],
    	[ [ -0.348, -0.072, 0.488, 0.412, 0.444, 0.688, -0.356, -0.916, ... ], [ -1.844, -1.196, 1.9719206030383154, -0.768, -1.516, 1.6881037917617596, 0.624, -1.904, ... ], [ 1.7131297397021996, 0.084, 1.828, 1.0350046594210944, 0.468, -0.1, -0.396, -1.704, ... ], [ 0.16, 0.432, 1.296, -1.668, 1.14, -0.036, 1.5976915032725698, 1.3966839652936267, ... ], [ -1.592, -1.264, 1.5805007765701824, -0.768, -1.812, 0.0, -0.344, 1.064, ... ], [ 1.721722110634104, 0.2340521236037605, -0.144, -0.452, 1.68, 0.736, 1.096, 0.304, ... ], [ -1.708, -0.436, 1.1432122429446137, -0.152, 0.544, 0.18, -1.112, -0.884, ... ], [ 0.456, 0.14, 0.856, 0.064, -1.188, -1.504, 1.4411053449021245, 1.8376977158340289, ... ], ... ],
    	[ [ 1.5740212885193456, -0.356, -1.516, 0.476, -0.004, 1.1418931019575107, -0.204, 0.288, ... ], [ 0.524, -0.656, -1.496, -1.028, -0.996, -1.708, 0.5115482407528484, -0.5, ... ], [ -1.8, 0.452, 1.092, -0.772, -1.584, -1.748, -1.116, -0.792, ... ], [ -1.384, 0.24, 1.322595477212634, -1.308, 1.5958227961151343, 0.452, -0.4, 1.7285923709319044, ... ], [ -1.08, 0.912, 0.888, 1.132, -0.868, -0.356, 0.212, -0.908, ... ], [ -1.928, 1.436, 1.2849771583402898, 0.056, 1.376445774408573, 0.5034566463911263, -0.476, 0.284, ... ], [ 1.42, -1.688, 1.432, -1.26, -0.136, -0.308, 0.052, -1.0, ... ], [ -0.34, 1.06, -0.26, -0.448, -1.852, 1.42, -0.64, 0.396, ... ], ... ],
    	[ [ 0.732, 1.512, -0.968, 1.7854351301489002, 1.5217952950343294, -0.704, 1.596, 1.644, ... ], [ -1.748, 1.8124213796084978, 0.648, -1.62, 0.024, 0.7901312928425644, 0.424, 0.016, ... ], [ -1.396, -1.704, 1.7914198264681331, 0.124, 0.544, -0.584, -0.136, 1.429477934910472, ... ], [ 1.19882147069765, 1.4986442668127844, 0.904, 1.6993038373063358, 0.256, -0.816, 0.816, -1.484, ... ], [ -1.3, 0.768, -0.34, 0.328, -1.656, 0.452, -1.74, 0.048, ... ], [ 1.6454595249489752, 0.548, 1.068, 0.344, -1.692, 0.356, 0.556, 1.228, ... ], [ -0.736, -0.74, -1.296, 0.36, 0.448, -1.016, 0.6358230238380148, 1.7209343535787178, ... ], [ -0.52, -0.524, 0.648, -1.184, 1.4583816811276555, -1.908, -0.904, -0.144, ... ], ... ],
    	[ [ 1.6488183644169205, 1.481941891557661, 1.8, -1.392, 0.44567930587253235, 0.528, -0.144, -1.292, ... ], [ -1.388, -0.304, 1.84, -0.704, -0.512, 0.192, -1.036, -1.752, ... ], [ 1.3075420281913894, 0.496, 0.284, -1.204, -0.796, 0.744, 0.796, 1.7606411605320549, ... ], [ -1.16, 1.24, 0.592, 0.884, -0.476, -1.552, -0.488, -0.844, ... ], [ 1.2661984924042113, 1.648, -1.2, 0.88, 1.848, 1.5005007765701823, -0.152, 1.468, ... ], [ 0.504, 0.972, -1.44, -0.432, -1.692, -0.188, -1.564, 1.6404763817701071, ... ], [ -1.836, -1.68, 1.3781984924042114, 1.9982594794043993, -0.836, 1.32, 1.196, 1.876, ... ], [ 0.784, -1.8, -0.688, -0.884, -1.932, 1.248, -1.468, -1.892, ... ], ... ],
    	[ [ 1.24, -1.4, -1.424, 1.2422350846043242, -0.856, 1.1844579718086106, -1.068, -0.544, ... ], [ 0.572, -0.704, 1.1244397895699945, -1.48, -0.344, -1.78, -0.044, -0.856, ... ], [ 0.844, -0.304, -0.272, 0.18, -0.924, -0.696, 1.0864794880508368, -1.564, ... ], [ 1.296, 1.691645819953149, -0.628, -1.336, 0.7485619912932511, 0.644, 0.812, -1.68, ... ], [ -1.788, 1.596, 0.696140611684753, 1.2901190954425268, 0.04, 1.472, -0.352, 1.8780640932809174, ... ], [ -1.264, -0.544, 0.996, -1.42, 1.096, 1.044, 0.8722444034465127, 0.568, ... ], [ 0.844, -1.136, -1.612, -0.732, 1.891169438183042, 0.092, -0.008, 1.348, ... ], [ -1.612, 1.6089465509787553, 1.012, 0.892, 1.348, -0.948, -0.648, 1.7847083600937017, ... ], ... ],
    	[ [ -1.432, -0.708, 1.2399937874385407, 0.44, -0.908, -1.128, 1.312, -0.504, ... ], [ -1.196, -1.964, -1.732, -1.552, 1.7, 1.5059359067190827, -1.148, -1.824, ... ], [ -1.916, 1.518528277650987, -1.692, -0.56, -1.416, 0.284, 0.364, -1.936, ... ], [ -1.644, -0.636, -0.524, -1.424, 1.628, -0.208, -1.044, -1.144, ... ], [ -0.064, -0.02, -1.872, -1.984, -1.148, -0.752, 1.332, -1.78, ... ], [ 1.887908405638278, -1.38, 1.164, 0.904, -1.264, -0.008, 0.368, -0.204, ... ], [ 0.864, -0.808, 1.70646107808934, -0.632, 1.208, -1.096, 0.044, -1.152, ... ], [ -1.32, -1.928, -1.756, -1.916, 1.3023998633662717, -1.468, -0.988, 0.8368733665785298, ... ], ... ],
    	[ [ -1.936, -0.808, -0.408, 0.19412530800398586, -1.304, -1.044, 1.205343535787178, 1.5994992234298175, ... ], [ 0.9953523991836055, 1.24, 0.172, -1.264, 0.456, -0.888, -0.96, 1.24, ... ], [ -1.204, 0.008, -1.528, -0.232, 1.2647023752551232, 0.548, -1.704, -1.96, ... ], [ 1.9021190954425269, 1.704286980485204, 1.9136305162723821, -0.656, 1.292, 1.6526045683319421, 0.496, 0.948, ... ], [ -1.388, -1.644, 0.324, 1.962076290680955, 0.768, -0.232, 1.2133313383871407, 2.1146258568512875, ... ], [ 0.372, 1.6672732299448016, -1.912, -1.268, -1.92, 1.068, -1.512, -0.692, ... ], [ -1.184, -1.408, -0.164, -0.124, 2.103102238621395, 1.404, 1.2235420281913896, -0.336, ... ], [ -1.272, -1.924, 1.028, 0.984, -0.032, -0.172, 0.572, -0.016, ... ], ... ],
    	...
    ]

To produce the following output:

TrainingTester.java:633 executed in 0.19 seconds (0.000 gc):

    Result[] array = ConstantResult.batchResultArray(pop(RefUtil.addRef(data)));
    @Nullable
    Result eval = layer.eval(array);
    assert eval != null;
    TensorList tensorList = Result.getData(eval);
    String temp_18_0016 = tensorList.stream().limit(1).map(x -> {
      String temp_18_0017 = x.prettyPrint();
      x.freeRef();
      return temp_18_0017;
    }).reduce((a, b) -> a + "\n" + b).orElse("");
    tensorList.freeRef();
    return temp_18_0016;

Returns

    [
    	[ [ 1.7131297397021996, 2.0479693926384654, 1.9719206030383154, 1.0350046594210944, 0.58, 1.6881037917617596, 1.0750106442596727, 1.6781068980424894, ... ], [ 2.0794442212682083, 2.0479693926384654, 1.9719206030383154, 1.036, 1.14, 1.6881037917617596, 1.5976915032725698, 1.3966839652936267, ... ], [ 2.0794442212682083, 1.504, 1.5805007765701824, 1.461093147502087, 1.844, 1.4953770217065614, 2.060482366608686, 1.716, ... ], [ 1.721722110634104, 1.504, 1.1432122429446137, 1.461093147502087, 1.844, 1.4234504338296672, 2.060482366608686, 1.8376977158340289, ... ], [ 1.8323541800468508, 1.5855817266722316, 1.9912060303831547, 1.385093147502087, 1.8651053449021247, 1.7780396984808422, 1.4411053449021245, 1.8376977158340289, ... ], [ 1.8323541800468508, 1.5855817266722316, 1.9912060303831547, 1.612445774408573, 1.8651053449021247, 1.7780396984808422, 1.9702106898042488, 1.7580275010808049, ... ], [ 1.824, 1.288, 1.6745770672511373, 1.612445774408573, 1.5308762451363789, 1.4359815900385033, 1.9702106898042488, 1.7597496117149087, ... ], [ 1.5198168112765558, 1.532, 1.856378574846926, 1.4388640477363412, 1.6667052538129723, 1.4935755141107725, 1.3625404750510248, 1.7597496117149087, ... ], ... ],
    	[ [ 1.5740212885193456, 1.8124213796084978, 1.7914198264681331, 1.7854351301489002, 1.5217952950343294, 1.1418931019575107, 1.596, 1.644, ... ], [ 1.19882147069765, 1.8124213796084978, 1.7914198264681331, 1.6993038373063358, 1.5958227961151343, 0.7901312928425644, 0.816, 1.7285923709319044, ... ], [ 1.6454595249489752, 1.4986442668127844, 1.432, 1.6993038373063358, 1.5958227961151343, 0.5034566463911263, 0.816, 1.7285923709319044, ... ], [ 1.6454595249489752, 1.436, 1.4563297852467758, 2.4257768850728327, 1.4583816811276555, 1.42, 1.2775817266722316, 1.7209343535787178, ... ], [ 1.646436683289265, 1.209319140987103, 1.4563297852467758, 2.4257768850728327, 1.956, 1.498619872012709, 1.2775817266722316, 1.5669984468596352, ... ], [ 1.8494351301489003, 1.920763362255311, 1.4910656464212821, 0.416, 1.956, 1.498619872012709, 1.5659724989191954, 1.5669984468596352, ... ], [ 1.8494351301489003, 1.920763362255311, 1.66, 0.22029319304666295, 1.5137649153956758, 1.485410735348825, 1.5659724989191954, 1.3181128828810678, ... ], [ 1.4640793969616845, 1.3365007765701824, 1.66, 1.210967839498101, 1.3554992234298175, 1.5873648243065237, 1.6566839652936265, 1.3876702147532243, ... ], ... ],
    	[ [ 1.6488183644169205, 1.481941891557661, 1.84, 1.2422350846043242, 0.44567930587253235, 1.1844579718086106, 1.0864794880508368, 1.7606411605320549, ... ], [ 1.3075420281913894, 1.691645819953149, 1.84, 1.2901190954425268, 1.848, 1.5005007765701823, 1.0864794880508368, 1.8780640932809174, ... ], [ 1.296, 1.691645819953149, 1.3781984924042114, 1.9982594794043993, 1.891169438183042, 1.5005007765701823, 1.196, 1.8780640932809174, ... ], [ 1.5832854273448391, 1.6089465509787553, 1.448, 1.9982594794043993, 1.891169438183042, 1.4162931930466631, 1.196, 1.876, ... ], [ 2.090851850336304, 1.9691297397021994, 1.5182656919658581, 1.2584854728894153, 1.790338876366084, 1.84, 1.3158718134381653, 1.7847083600937017, ... ], [ 2.090851850336304, 1.9691297397021994, 1.6603235726853165, 1.348, 1.6276946095532994, 1.84, 1.3158718134381653, 1.1826198720127092, ... ], [ 1.7009953405789056, 1.7983144815660086, 1.6603235726853165, 1.396, 1.136, 1.824580173531867, 1.6380947006424518, 1.392, ... ], [ 1.5843541800468508, 1.5228702602978006, 1.9232488351447263, 1.824, 0.5957436268763303, 1.8571053449021244, 1.6380947006424518, 1.6358168112765556, ... ], ... ],
    	[ [ 0.9953523991836055, 1.518528277650987, 1.2399937874385407, 0.44, 1.7, 1.5059359067190827, 1.312, 1.5994992234298175, ... ], [ 1.9021190954425269, 1.704286980485204, 1.9136305162723821, 1.962076290680955, 1.7, 1.6526045683319421, 1.332, 2.1146258568512875, ... ], [ 1.9021190954425269, 1.704286980485204, 1.9136305162723821, 1.962076290680955, 2.103102238621395, 1.6526045683319421, 1.332, 2.1146258568512875, ... ], [ 1.887908405638278, 1.6672732299448016, 1.70646107808934, 1.2157618091149465, 2.103102238621395, 1.9964457744085728, 1.7134473275489377, 0.928, ... ], [ 1.8483663774468884, 1.043401644229517, 1.588476381770107, 1.6118168112765556, 1.3023998633662717, 1.9964457744085728, 1.7238962082382403, 1.4786014620512125, ... ], [ 1.9525923709319044, 1.588824349255499, 1.609209136663884, 1.8387358611745064, 0.8525313839317167, 1.5535267245106223, 1.7238962082382403, 1.4786014620512125, ... ], [ 1.9525923709319044, 1.588824349255499, 1.6449953405789055, 1.8387358611745064, 0.8525313839317167, 1.5535267245106223, 1.6472122429446137, 1.419889995676781, ... ], [ 1.4515052082683964, 1.5811663319023121, 1.6449953405789055, 1.3926595704935514, 1.3270962537828166, 1.468, 1.8761953861234817, 1.8609465509787553, ... ], ... ],
    	[ [ 1.5900334859193832, 1.1048853362556867, 1.6434076290680955, 1.7323297852467756, 1.445410735348825, 1.9362625856851285, 1.324, 1.8589862494595977, ... ], [ 1.5900334859193832, 1.4762991778852415, 1.941630516272382, 1.656, 1.7481953861234818, 1.9362625856851285, 1.5176061214723067, 1.9498137049958262, ... ], [ 1.7365129739702199, 1.4762991778852415, 1.941630516272382, 1.5149374598594474, 1.7481953861234818, 1.478009091119308, 1.5176061214723067, 1.9498137049958262, ... ], [ 1.7365129739702199, 1.316, 1.7574717223490128, 1.397825902395864, 1.7155786203915022, 1.478009091119308, 1.0543938785276932, 0.8499115119190075, ... ], [ 1.572, 2.0069190498979506, 1.537941891557661, 1.397825902395864, 1.5640793969616846, 1.7690137505404024, 1.4495817266722317, 1.216, ... ], [ 1.572, 2.0069190498979506, 1.656445774408573, 1.7274564186682457, 1.747218227783192, 1.8215267245106224, 1.4495817266722317, 1.216, ... ], [ 0.988, 1.1152182277831921, 1.656445774408573, 1.820763362255311, 1.747218227783192, 1.8215267245106224, 1.4338624945959766, 1.3889343535787178, ... ], [ 1.5375939240722691, 1.5445557787317918, 1.08, 1.820763362255311, 1.6190534490212447, 1.753654911072457, 1.719749611714909, 1.3889343535787178, ... ], ... ],
    	[ [ 0.8356455922302688, 0.5665344902124463, 1.000671767893589, 1.7838168112765558, 1.518589264651175, 1.124, 2.024146368800451, 1.6790228416597102, ... ], [ 2.2716699870303434, 1.9452885336255685, 1.274345088927543, 1.4618687071574357, 1.518589264651175, 1.5, 2.024146368800451, 1.6369037462171834, ... ], [ 2.2716699870303434, 1.9452885336255685, 1.624, 1.4618687071574357, 1.7298015075957887, 1.5, 1.2129099587786427, 1.6369037462171834, ... ], [ 1.8857099132340667, 1.311395431668058, 1.624, 1.4710534490212446, 1.7298015075957887, 1.4158046138765181, 1.2270472364597855, 1.3705160802509497, ... ], [ 1.8857099132340667, 1.311395431668058, 1.889893101957511, 1.4710534490212446, 1.1926229782934388, 0.9716764273146834, 1.2270472364597855, 1.368, ... ], [ 1.7932885336255686, 1.48, 1.889893101957511, 1.4690991323406655, 1.5255145271105848, 1.9365189588087983, 1.692, 1.408, ... ], [ 1.8022228872042865, 1.984, 2.0841281865618346, 1.7766961626936644, 1.7727999544554238, 1.9365189588087983, 1.692, 1.6170625401405527, ... ], [ 1.8022228872042865, 1.984, 2.0841281865618346, 1.8097099132340666, 1.7727999544554238, 1.891578620391502, 1.5885007765701824, 1.6170625401405527, ... ], ... ],
    	[ [ 1.22, 1.6104732754893776, 0.8, 1.9805373687702952, 1.5131907267023874, 1.322271676804437, 1.3938443123573605, 1.7059846963192329, ... ], [ 1.7079756051999249, 1.3003235726853166, 0.8, 1.9805373687702952, 1.5231084511828539, 1.4097649153956762, 1.3938443123573605, 1.8301312928425644, ... ], [ 1.819908405638278, 1.8937343080341416, 0.7451725444637715, 1.6979846963192329, 1.5231084511828539, 1.4097649153956762, 1.9778931019575106, 1.8301312928425644, ... ], [ 1.819908405638278, 1.8937343080341416, 1.754815258136191, 2.0954442212682083, 1.372, 1.2184060759277309, 1.9778931019575106, 1.616, ... ], [ 1.522143490242602, 0.5771725444637714, 1.7560915943617221, 2.0954442212682083, 1.372, 1.4727083600937019, 1.249630516272382, 1.1501862950041737, ... ], [ 1.1696671084724946, 1.5334229327488627, 1.7560915943617221, 1.0500947006424517, 0.9729343535787178, 1.6744182733277684, 1.7984060759277307, 1.1501862950041737, ... ], [ 1.5017465054341792, 1.7796824121532617, 1.7107114663744314, 1.0500947006424517, 0.9729343535787178, 1.6744182733277684, 1.7984060759277307, 1.5741128828810678, ... ], [ 1.5017465054341792, 1.7796824121532617, 1.548, 0.9147602559745818, 1.305563316710735, 1.5552244403446513, 1.716, 1.9801403839618723, ... ], ... ],
    	[ [ 2.057654911072457, 1.2701556876426394, 1.4122991778852414, 1.4959143904768561, 0.7545648698510998, 1.7309190498979505, 1.0981372776811429, 1.6673526269064862, ... ], [ 2.057654911072457, 1.7168121518554613, 1.713050342740515, 1.7000243948000753, 2.023834993515172, 1.7309190498979505, 1.7425098676894903, 2.0656671084724945, ... ], [ 1.7105282776509871, 1.7168121518554613, 1.713050342740515, 1.7000243948000753, 2.023834993515172, 1.4, 1.7425098676894903, 2.0656671084724945, ... ], [ 1.9462106898042488, 1.1276336225531116, 1.601160347063734, 1.6099481041191201, 1.3882075835235195, 1.6535755141107726, 1.706009091119308, 1.63512663342147, ... ], [ 1.9462106898042488, 1.6155358156299304, 1.232, 1.445923709319045, 1.9677618091149462, 1.6535755141107726, 1.706009091119308, 0.9953588394679451, ... ], [ 1.7956030151915772, 2.0077618091149465, 1.7209037462171834, 1.716, 1.9677618091149462, 1.156, 1.751395431668058, 1.112, ... ], [ 1.1851481496636962, 2.0077618091149465, 1.7209037462171834, 1.716, 1.6902900867659334, 0.46, 1.751395431668058, 1.14, ... ], [ 1.296, 1.208, 1.6315602104300055, 1.5145526724510623, 1.292, 1.5302472820043616, 1.8216977158340288, 1.6038534034766685, ... ], ... ],
    	...
    ]

Limited-Memory BFGS

Next, we apply the same optimization using L-BFGS, a quasi-Newton method whose curvature estimates make it nearly ideal for smooth, approximately quadratic objectives.
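
For orientation, the core of L-BFGS is the two-loop recursion, which turns a short history of curvature pairs into an approximate Newton direction without ever forming a Hessian. The sketch below is purely illustrative, assuming stored pairs s_i = x_{i+1} - x_i and y_i = g_{i+1} - g_i; it is not the library's LBFGS implementation.

    // Illustrative L-BFGS two-loop recursion (not the library's LBFGS class).
    class LbfgsSketch {
      static double dot(double[] a, double[] b) {
        double sum = 0;
        for (int i = 0; i < a.length; i++) sum += a[i] * b[i];
        return sum;
      }

      // Returns the search direction -H*g from curvature pairs stored oldest-first.
      static double[] direction(double[] grad, java.util.List<double[]> s, java.util.List<double[]> y) {
        int m = s.size();
        double[] q = grad.clone();
        double[] alpha = new double[m];
        double[] rho = new double[m];
        for (int i = m - 1; i >= 0; i--) { // first loop: newest to oldest
          rho[i] = 1.0 / dot(y.get(i), s.get(i));
          alpha[i] = rho[i] * dot(s.get(i), q);
          for (int j = 0; j < q.length; j++) q[j] -= alpha[i] * y.get(i)[j];
        }
        if (m > 0) { // initial Hessian scaling gamma = s'y / y'y
          double gamma = dot(s.get(m - 1), y.get(m - 1)) / dot(y.get(m - 1), y.get(m - 1));
          for (int j = 0; j < q.length; j++) q[j] *= gamma;
        }
        for (int i = 0; i < m; i++) { // second loop: oldest to newest
          double beta = rho[i] * dot(y.get(i), q);
          for (int j = 0; j < q.length; j++) q[j] += (alpha[i] - beta) * s.get(i)[j];
        }
        for (int j = 0; j < q.length; j++) q[j] = -q[j]; // negate for descent
        return q;
      }
    }

With an empty pair history the recursion reduces to (scaled) steepest descent, which is consistent with the log below constructing "GD" line search parameters for the first iteration.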

TrainingTester.java:509 executed in 52.52 seconds (6.131 gc):

    IterativeTrainer iterativeTrainer = new IterativeTrainer(trainable.addRef());
    try {
      // Armijo-Wolfe line search along each descent direction
      iterativeTrainer.setLineSearchFactory(label -> new ArmijoWolfeSearch());
      // L-BFGS orientation: quasi-Newton directions from recent gradient history
      iterativeTrainer.setOrientation(new LBFGS());
      iterativeTrainer.setMonitor(TrainingTester.getMonitor(history));
      // Stop on whichever comes first: 30s wall clock, 250 iterations, or zero error
      iterativeTrainer.setTimeout(30, TimeUnit.SECONDS);
      iterativeTrainer.setIterationsPerSample(100);
      iterativeTrainer.setMaxIterations(250);
      iterativeTrainer.setTerminateThreshold(0);
      return iterativeTrainer.run();
    } finally {
      iterativeTrainer.freeRef();
    }
Logging
Reset training subject: 756539124654
Reset training subject: 758006037481
Adding measurement 3d3b19cd to history. Total: 0
LBFGS Accumulation History: 1 points
Constructing line search parameters: GD
Non-optimal measurement 0.31693414565 < 0.31693414565. Total: 1
th(0)=0.31693414565;dx=-1.3156303124687387E-7
Adding measurement 63f01223 to history. Total: 1
New Minimum: 0.31693414565 > 0.31693386267619117
WOLFE (weak): th(2.154434690031884)=0.31693386267619117; dx=-1.3134477054529872E-7 evalInputDelta=2.8297380882946754E-7
Adding measurement 64ac0d01 to history. Total: 2
New Minimum: 0.31693386267619117 > 0.31693357970254
WOLFE (weak): th(4.308869380063768)=0.31693357970254; dx=-1.3134469738356545E-7 evalInputDelta=5.659474600072656E-7
Adding measurement a67b0c6 to history. Total: 3
New Minimum: 0.31693357970254 > 0.31693244780951135
WOLFE (weak): th(12.926608140191302)=0.31693244780951135; dx=-1.3134440473663485E-7 evalInputDelta=1.697840488645852E-6
Adding measurement 28bff2f5 to history. Total: 4
New Minimum: 0.31693244780951135 > 0.31692735432209185
WOLFE (weak): th(51.70643256076521)=0.31692735432209185; dx=-1.3134308782544688E-7 evalInputDelta=6.791327908151068E-6
Adding measurement 210a1a12 to history. Total: 5
New Minimum: 0.31692735432209185 > 0.316900189918363
WOLFE (weak): th(258.53216280382605)=0.316900189918363; dx=-1.3133606429911192E-7 evalInputDelta=3.395573163700316E-5
Adding measurement 915b1d to history. Total: 6
New Minimum: 0.316900189918363 > 0.31673044530656747
WOLFE (weak): th(1551.1929768229563)=0.31673044530656747; dx=-1.312921672595173E-7 evalInputDelta=2.0370034343253351E-4
Adding measurement 771c9b29 to history. Total: 7
New Minimum: 0.31673044530656747 > 0.3155100608440427
WOLFE (weak): th(10858.350837760694)=0.3155100608440427; dx=-1.3091620745180413E-7 evalInputDelta=0.001424084805957293
Adding measurement 618f01cc to history. Total: 8
New Minimum: 0.3155100608440427 > 0.3057295326412294
WOLFE (weak): th(86866.80670208555)=0.3057295326412294; dx=-1.2644451186133647E-7 evalInputDelta=0.011204613008770625
Adding measurement 1c085d01 to history. Total: 9
New Minimum: 0.3057295326412294 > 0.23077524214427925
END: th(781801.26031877)=0.23077524214427925; dx=-9.081368023258879E-8 evalInputDelta=0.08615890350572075
Fitness changed from 0.31693414565 to 0.23077524214427925
Iteration 1 complete. Error: 0.23077524214427925 Total: 52.5156; Orientation: 2.3017; Line Search: 45.7238
Final threshold in iteration 1: 0.23077524214427925 (> 0.0) after 52.516s (< 30.000s)

Returns

    0.23077524214427925
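
The log above shows the Armijo-Wolfe search repeatedly growing the step size th(...) while the objective keeps decreasing, until the step tagged END is accepted. In textbook form such a search balances two tests on the step length t, sketched below with illustrative constants c1 and c2 (these names and values are not taken from the library's ArmijoWolfeSearch):

    // Illustrative Armijo-Wolfe acceptance tests (not the library's internals).
    class LineSearchChecks {
      // f0, dx0: objective value and directional derivative at t = 0;
      // fT, dxT: the same quantities at step length t.
      static boolean sufficientDecrease(double f0, double dx0, double fT, double t, double c1) {
        return fT <= f0 + c1 * t * dx0; // Armijo: at least a fraction of the predicted drop
      }

      static boolean weakWolfe(double dx0, double dxT, double c2) {
        return dxT >= c2 * dx0; // curvature: slope has flattened enough (dx0 < 0 on a descent direction)
      }
    }

The dx values reported at each th(...) in the log correspond to the directional derivatives these tests consume.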

This training run resulted in the following configuration:

TrainingTester.java:610 executed in 0.00 seconds (0.000 gc):

    RefList<double[]> state = network.state();
    assert state != null;
    String description = state.stream().map(RefArrays::toString).reduce((a, b) -> a + "\n" + b)
        .orElse("");
    state.freeRef();
    return description;

Returns

    

The state is empty, as expected: the pooling layer has no trainable weights, so only the input is regressed. The regressed input:

TrainingTester.java:622 executed in 0.01 seconds (0.000 gc):

    return RefArrays.stream(RefUtil.addRef(data)).flatMap(x -> {
      return RefArrays.stream(x);
    }).limit(1).map(x -> {
      String temp_18_0015 = x.prettyPrint();
      x.freeRef();
      return temp_18_0015;
    }).reduce((a, b) -> a + "\n" + b).orElse("");

Returns

    [
    	[ [ 0.044, -0.236, -0.1, 0.756, 0.58, 1.5452639335890868, -0.816, 1.8569099369840614, ... ], [ -1.084, -0.788, -1.76, -1.088, -0.216, 1.348, -1.688, -0.3, ... ], [ 1.044, 1.5786114154168385, -1.972, -0.148, 1.6047890087198575, -0.18, 0.8001009718870055, 0.716, ... ], [ -1.628, -0.26, -0.732, 1.036, -0.424, -0.932, 0.548, -1.952, ... ], [ 1.7352483713056817, -0.064, 1.468, -0.476, -1.948, 0.9969635506569285, 0.4, 1.22, ... ], [ 0.384, 1.504, 1.092, 1.5750800848273392, 1.844, 0.88, -1.772, 1.716, ... ], [ 1.116, 1.192, -1.636, -1.568, 1.7606843141029813, -1.688, 1.5710090475061316, 1.7110164929658729, ... ], [ -1.068, -1.224, -0.88, 0.088, -1.5, 1.7587061318451154, 0.132, 0.6, ... ], ... ],
    	[ [ -0.348, -0.072, 0.488, 0.412, 0.444, 0.688, -0.356, -0.916, ... ], [ -1.844, -1.196, 1.922749767329383, -0.768, -1.516, 1.5271810567143447, 0.624, -1.904, ... ], [ 1.5119763208929309, 0.084, 1.828, 1.5445933204045752, 0.468, -0.1, -0.396, -1.704, ... ], [ 0.16, 0.432, 1.296, -1.668, 1.14, -0.036, 0.9562356010696793, 1.3698635094523908, ... ], [ -1.592, -1.264, 1.6654322200674292, -0.768, -1.812, 0.0, -0.344, 1.064, ... ], [ 1.5496241856528408, 1.6175406374141759, -0.144, -0.452, 1.68, 0.736, 1.096, 0.304, ... ], [ -1.708, -0.436, 1.3309554338332648, -0.152, 0.544, 0.18, -1.112, -0.884, ... ], [ 0.456, 0.14, 0.856, 0.064, -1.188, -1.504, 1.450045496849203, 1.875693361609113, ... ], ... ],
    	[ [ 1.0242019437740109, -0.356, -1.516, 0.476, -0.004, 0.9630900630159386, -0.204, 0.288, ... ], [ 0.524, -0.656, -1.496, -1.028, -0.996, -1.708, 1.4703795370770298, -0.5, ... ], [ -1.8, 0.452, 1.092, -0.772, -1.584, -1.748, -1.116, -0.792, ... ], [ -1.384, 0.24, 1.6913767450296269, -1.308, 0.9230763620974687, 0.452, -0.4, 1.75764786475991, ... ], [ -1.08, 0.912, 0.888, 1.132, -0.868, -0.356, 0.212, -0.908, ... ], [ -1.928, 1.436, 1.6649336160911308, 0.056, 1.20211281144054, 1.518163892384549, -0.476, 0.284, ... ], [ 1.42, -1.688, 1.432, -1.26, -0.136, -0.308, 0.052, -1.0, ... ], [ -0.34, 1.06, -0.26, -0.448, -1.852, 1.42, -0.64, 0.396, ... ], ... ],
    	[ [ 0.732, 1.512, -0.968, 1.8860118395535346, 0.7194166577840241, -0.704, 1.596, 1.644, ... ], [ -1.748, 1.8481819873968124, 0.648, -1.62, 0.024, 0.7588407610277892, 0.424, 0.016, ... ], [ -1.396, -1.704, 1.657317547261954, 0.124, 0.544, -0.584, -0.136, 1.8943658361585598, ... ], [ 1.8201620310196136, 1.4472383931170822, 0.904, 1.8311710785257453, 0.256, -0.816, 0.816, -1.484, ... ], [ -1.3, 0.768, -0.34, 0.328, -1.656, 0.452, -1.74, 0.048, ... ], [ 1.5359426635972624, 0.548, 1.068, 0.344, -1.692, 0.356, 0.556, 1.228, ... ], [ -0.736, -0.74, -1.296, 0.36, 0.448, -1.016, 1.4270264711544722, 1.7365796194861054, ... ], [ -0.52, -0.524, 0.648, -1.184, 1.469556871061504, -1.908, -0.904, -0.144, ... ], ... ],
    	[ [ 1.9304331507498969, 0.882951711103394, 1.8, -1.392, -0.09072981095218448, 0.528, -0.144, -1.292, ... ], [ -1.388, -0.304, 1.84, -0.704, -0.512, 0.192, -1.036, -1.752, ... ], [ 1.586921776537596, 0.496, 0.284, -1.204, -0.796, 0.744, 0.796, 1.3695095128473656, ... ], [ -1.16, 1.24, 0.592, 0.884, -0.476, -1.552, -0.488, -0.844, ... ], [ 1.3891255816765422, 1.648, -1.2, 0.88, 1.848, 1.5854322200674293, -0.152, 1.468, ... ], [ 0.504, 0.972, -1.44, -0.432, -1.692, -0.188, -1.564, 1.9355013960237015, ... ], [ -1.836, -1.68, 1.5011255816765423, 1.5959526417858616, -0.836, 1.32, 1.196, 1.876, ... ], [ 0.784, -1.8, -0.688, -0.884, -1.932, 1.248, -1.468, -1.892, ... ], ... ],
    	[ [ 1.24, -1.4, -1.424, 1.0500218177421339, -0.856, 0.9050782234624039, -1.068, -0.544, ... ], [ 0.572, -0.704, 1.73460515995811, -1.48, -0.344, -1.78, -0.044, -0.856, ... ], [ 0.844, -0.304, -0.272, 0.18, -0.924, -0.696, 1.7212302762934184, -1.564, ... ], [ 1.296, 1.8101028332519409, -0.628, -1.336, 1.772209389233752, 0.644, 0.812, -1.68, ... ], [ -1.788, 1.596, 1.6840274018369397, 1.3638753490059252, 0.04, 1.472, -0.352, 1.692555940379036, ... ], [ -1.264, -0.544, 0.996, -1.42, 1.096, 1.044, 1.6992084585512843, 0.568, ... ], [ 0.844, -1.136, -1.612, -0.732, 1.7146014372282392, 0.092, -0.008, 1.348, ... ], [ -1.612, 1.5195450315079693, 1.012, 0.892, 1.348, -0.948, -0.648, 1.5477943334961184, ... ], ... ],
    	[ [ -1.432, -0.708, 0.5605422394605662, 0.44, -0.908, -1.128, 1.312, -0.504, ... ], [ -1.196, -1.964, -1.732, -1.552, 1.7, 1.691444059620964, -1.148, -1.824, ... ], [ -1.916, 1.7330919243808738, -1.692, -0.56, -1.416, 0.284, 0.364, -1.936, ... ], [ -1.644, -0.636, -0.524, -1.424, 1.628, -0.208, -1.044, -1.144, ... ], [ -0.064, -0.02, -1.872, -1.984, -1.148, -0.752, 1.332, -1.78, ... ], [ 1.9437843553075191, -1.38, 1.164, 0.904, -1.264, -0.008, 0.368, -0.204, ... ], [ 0.864, -0.808, 1.7668071037321207, -0.632, 1.208, -1.096, 0.044, -1.152, ... ], [ -1.32, -1.928, -1.756, -1.916, 0.4240299345657979, -1.468, -0.988, 1.377752559376786, ... ], ... ],
    	[ [ -1.936, -0.808, -0.408, 0.9473331095453592, -1.304, -1.044, 1.3617961948610537, 1.5145677799325707, ... ], [ -0.7569173824438026, 1.24, 0.172, -1.264, 0.456, -0.888, -0.96, 1.24, ... ], [ -1.204, 0.008, -1.528, -0.232, 1.8122866820136883, 0.548, -1.704, -1.96, ... ], [ 1.9758753490059253, 1.4316123460993064, 1.79740854096036, -0.656, 1.292, 1.576613276781774, 0.496, 0.948, ... ], [ -1.388, -1.644, 0.324, 1.6715213524009003, 0.768, -0.232, 1.47483078283919, 1.4888152205557847, ... ], [ 0.372, 1.329782493942584, -1.912, -1.268, -1.92, 1.068, -1.512, -0.692, ... ], [ -1.184, -1.408, -0.164, -0.124, 1.7723166165794861, 1.404, 1.5029217765375962, -0.336, ... ], [ -1.272, -1.924, 1.028, 0.984, -0.032, -0.172, 0.572, -0.016, ... ], ... ],
    	...
    ]

Evaluating the layer on this input produces the following output:

TrainingTester.java:633 executed in 0.20 seconds (0.000 gc):

    // Wrap the regressed input tensors as constant results and evaluate the layer
    Result[] array = ConstantResult.batchResultArray(pop(RefUtil.addRef(data)));
    @Nullable
    Result eval = layer.eval(array);
    assert eval != null;
    // Pretty-print the first output tensor, releasing references as we go
    TensorList tensorList = Result.getData(eval);
    String temp_18_0016 = tensorList.stream().limit(1).map(x -> {
      String temp_18_0017 = x.prettyPrint();
      x.freeRef();
      return temp_18_0017;
    }).reduce((a, b) -> a + "\n" + b).orElse("");
    tensorList.freeRef();
    return temp_18_0016;

Returns

    [
    	[ [ 1.5119763208929309, 1.5786114154168385, 1.922749767329383, 1.5445933204045752, 1.6047890087198575, 1.5452639335890868, 0.8001009718870055, 1.8569099369840614, ... ], [ 1.7352483713056817, 1.5786114154168385, 1.922749767329383, 1.5445933204045752, 1.6047890087198575, 1.5271810567143447, 0.9562356010696793, 1.3698635094523908, ... ], [ 1.7352483713056817, 1.6175406374141759, 1.6654322200674292, 1.5750800848273392, 1.844, 0.9969635506569285, 1.5710090475061316, 1.716, ... ], [ 1.5496241856528408, 1.6175406374141759, 1.3309554338332648, 1.5750800848273392, 1.844, 1.7587061318451154, 1.5710090475061316, 1.875693361609113, ... ], [ 1.7138971667480591, 1.8895468928729044, 1.4994976732938308, 1.4990800848273391, 1.874045496849203, 1.8026251163353084, 1.450045496849203, 1.875693361609113, ... ], [ 1.824, 1.8895468928729044, 1.4994976732938308, 1.4990800848273391, 1.874045496849203, 1.8026251163353084, 1.9880909936984061, 1.8876597043134444, ... ], [ 1.824, 1.641115603487943, 1.4689535724683291, 1.43811281144054, 0.9475313305894993, 0.8615768274387025, 1.9880909936984061, 1.8876597043134444, ... ], [ 1.6315687106150383, 1.641115603487943, 1.5278279907917869, 0.9605659185676355, 1.0900654532264016, 1.118089132333471, 1.4720573364027376, 1.7172838899662854, ... ], ... ],
    	[ [ 1.0242019437740109, 1.8481819873968124, 1.657317547261954, 1.8860118395535346, 0.7194166577840241, 0.9630900630159386, 1.596, 1.8943658361585598, ... ], [ 1.8201620310196136, 1.8481819873968124, 1.6913767450296269, 1.8311710785257453, 0.9230763620974687, 0.7588407610277892, 1.4703795370770298, 1.8943658361585598, ... ], [ 1.8201620310196136, 1.4472383931170822, 1.6913767450296269, 1.8311710785257453, 1.20211281144054, 1.518163892384549, 1.4270264711544722, 1.75764786475991, ... ], [ 1.5359426635972624, 1.575865370817326, 1.6649336160911308, 1.0489934852227265, 1.469556871061504, 1.518163892384549, 1.5815468928729044, 1.7365796194861054, ... ], [ 1.916876279688393, 1.575865370817326, 1.5479663427043315, 1.0489934852227265, 1.956, 1.6573075690733545, 1.5815468928729044, 1.4814696000929681, ... ], [ 1.9500118395535346, 1.9431137421230076, 1.4754203805138946, 0.6270701066387399, 1.956, 1.7960810155098068, 1.4363402956865554, 1.3971355598651416, ... ], [ 1.9500118395535346, 1.9431137421230076, 1.721580550168573, 0.6270701066387399, 1.7059781822578661, 1.7960810155098068, 1.4363402956865554, 0.7124175884664916, ... ], [ 1.513250232670617, 1.4214322200674292, 1.721580550168573, 0.5717469752819802, 1.2705677799325708, 1.1939981386350649, 1.6298635094523908, 1.2960336572956686, ... ], ... ],
    	[ [ 1.9304331507498969, 0.882951711103394, 1.84, 1.0500218177421339, -0.09072981095218448, 0.9050782234624039, 1.7212302762934184, 1.3695095128473656, ... ], [ 1.586921776537596, 1.8101028332519409, 1.84, 1.3638753490059252, 1.848, 1.5854322200674293, 1.7212302762934184, 1.692555940379036, ... ], [ 1.3891255816765422, 1.8101028332519409, 1.6840274018369397, 1.5959526417858616, 1.848, 1.5854322200674293, 1.6992084585512843, 1.9355013960237015, ... ], [ 1.1407479059644479, 1.5195450315079693, 1.5011255816765423, 1.5959526417858616, 1.7146014372282392, 1.8230701066387398, 1.6992084585512843, 1.9355013960237015, ... ], [ 1.7176005065457716, 1.7679763208929309, 1.7954104023252955, 1.1087379277758487, 1.4372028744564787, 1.84, 1.6868881192419276, 1.5477943334961184, ... ], [ 1.7176005065457716, 1.7679763208929309, 1.7954104023252955, 1.668516027624639, 1.3259644813393963, 1.9586824527380462, 1.6868881192419276, 1.3413075690733545, ... ], [ 1.1914066795954248, 1.655272050412751, 1.0725085821648979, 1.668516027624639, 1.3377762384838552, 1.9586824527380462, 1.9219445249621976, 1.5203876539006937, ... ], [ 1.4658971667480591, 1.724023679107069, 1.7958516698988563, 1.824, 1.3377762384838552, 1.866045496849203, 1.9219445249621976, 1.7475687106150384, ... ], ... ],
    	[ [ -0.7569173824438026, 1.7330919243808738, 0.5605422394605662, 0.9473331095453592, 1.8122866820136883, 1.691444059620964, 1.3617961948610537, 1.5145677799325707, ... ], [ 1.9758753490059253, 1.7330919243808738, 1.79740854096036, 1.6715213524009003, 1.8122866820136883, 1.691444059620964, 1.47483078283919, 1.4888152205557847, ... ], [ 1.9758753490059253, 1.4316123460993064, 1.79740854096036, 1.6715213524009003, 1.7723166165794861, 1.576613276781774, 1.5029217765375962, 1.4888152205557847, ... ], [ 1.9437843553075191, 1.329782493942584, 1.7668071037321207, 1.0682493019881494, 1.7723166165794861, 1.82211281144054, 1.7089772515753985, 1.377752559376786, ... ], [ 1.6248625787699231, 1.7988444837576598, 1.8835013960237015, 1.7235687106150384, 0.4240299345657979, 1.82211281144054, 1.8848189432856552, 1.377752559376786, ... ], [ 1.98164786475991, 1.7988444837576598, 1.8835013960237015, 1.731454037809563, 1.4068208046505906, 1.5982274842460154, 1.8848189432856552, 1.062884396512057, ... ], [ 1.98164786475991, 1.085940802232327, 1.4572265535635476, 1.731454037809563, 1.4068208046505906, 1.8016615656783799, 1.8349554338332645, 0.9013611827462217, ... ], [ 0.582075431415001, 1.0648725569585225, 1.1354066795954247, 1.575932685408663, 1.780808965097056, 1.8016615656783799, 1.6593967014068256, 1.7715450315079693, ... ], ... ],
    	[ [ 1.0521346291826739, 0.07676786234164637, 1.61435213524009, 1.8239663427043316, 1.756081015509807, 1.8736815220555785, 1.7445596631089066, 1.7941701478432777, ... ], [ 0.9351673557958748, 1.0985777581211702, 1.82540854096036, 1.8703084997558221, 1.5313967014068255, 1.8736815220555785, 1.7445596631089066, 1.7218398303453215, ... ], [ 1.716397632089293, 1.3145004653412338, 1.82540854096036, 1.8703084997558221, 1.6798180126031876, 1.55694359427973, 1.611477716916632, 1.7218398303453215, ... ], [ 1.716397632089293, 1.316, 1.5429080756191262, 1.503376745029627, 1.6798180126031876, 1.033236531752147, 0.9605222830833677, 1.245513235577236, ... ], [ 1.624344018416426, 1.7878853271945245, 0.938951711103394, 1.0648052423671854, 1.613250232670617, 1.8338298521567225, 1.7535468928729046, 1.245513235577236, ... ], [ 1.624344018416426, 1.7878853271945245, 1.48211281144054, 1.2782137833275455, 1.1504630853156947, 1.8662274842460154, 1.7535468928729046, 1.7163003829321581, ... ], [ 1.0597880780373898, 0.5184630853156947, 1.518456829856966, 1.8431137421230077, 1.1504630853156947, 1.8662274842460154, 0.7857014784327772, 1.7163003829321581, ... ], [ 1.7365123048947684, 1.8887516286943185, 1.518456829856966, 1.8431137421230077, 1.7084549684920307, 1.4273393650040878, 1.6772838899662854, 1.4045796194861053, ... ], ... ],
    	[ [ -0.5098472758050627, 1.4605496849203075, 1.078898097430527, 1.8955687106150383, 1.7956634270433152, 1.435798056225989, 0.7635849442623664, 1.2990663839088692, ... ], [ 0.7160835482386649, 1.8424767862341647, 1.6006606349959123, 1.4931592389722108, 1.207918984490193, 1.5834440596209638, 0.7635849442623664, 1.183191034902944, ... ], [ 0.7160835482386649, 1.8424767862341647, 1.624, 1.8006843141029814, 1.6068744183234578, 1.6326032985931744, 1.4386487954423777, 1.69012651235901, ... ], [ 1.818658773630977, 1.387386723218226, 1.624, 1.8006843141029814, 1.817991883176336, 1.6326032985931744, 1.4386487954423777, 1.69012651235901, ... ], [ 1.818658773630977, 1.387386723218226, 1.7110900630159385, 1.5604549684920308, 1.817991883176336, 1.559491417835102, 0.6369972079525971, 1.4045023267061691, ... ], [ 1.6904767862341645, 1.732411333007763, 1.7110900630159385, 0.7985877363097694, 1.6910364493430714, 1.1319052835717232, 1.692, 1.6675487542378398, ... ], [ 1.71505640572027, 1.984, 1.7131118807580723, 1.6448289214742549, 1.6752620722241516, 1.1319052835717232, 1.692, 1.6675487542378398, ... ], [ 1.71505640572027, 1.984, 1.7131118807580723, 1.7426587736309769, 1.4800099781885991, 1.8558180126031878, 1.6734322200674292, 1.2616915002441778, ... ], ... ],
    	[ [ 1.256081015509807, 1.5657725157539846, 1.0452065971863491, 1.7503284561330208, 0.7868033810022501, 0.8149180538077255, 1.635228414928483, 1.4713057077084193, ... ], [ 1.9180691759562722, 0.7125085821648978, 1.0452065971863491, 1.7503284561330208, 1.87177437711892, 1.601978182257866, 1.635228414928483, 1.7988407610277892, ... ], [ 1.9180691759562722, 1.6165895976747047, 0.9083303174979561, 1.4633057077084193, 1.87177437711892, 1.601978182257866, 1.7990900630159385, 1.8720137009184699, ... ], [ 1.875784355307519, 1.6165895976747047, 1.69670427048018, 1.7512483713056817, 1.797287612696156, 1.0194876951052316, 1.7990900630159385, 1.8720137009184699, ... ], [ 1.3858061730496531, 0.7403303174979561, 1.7002156446924808, 1.7512483713056817, 1.797287612696156, 1.2357943334961186, 1.13340854096036, 1.3781601696546784, ... ], [ 0.7383047770259517, 1.7390464275316708, 1.8135232137658355, 1.3339445249621975, 1.086193826950347, 1.3704531071270956, 1.5994876951052317, 1.3781601696546784, ... ], [ 1.1195550096965685, 1.7390464275316708, 1.8135232137658355, 1.3339445249621975, 0.9885796194861054, 1.3704531071270956, 1.5994876951052317, 0.9684175884664916, ... ], [ 1.1195550096965685, 1.5829990693175324, 1.6425515462852427, 0.5973848618532909, 1.035123720311607, 1.6379208458551284, 1.928081015509807, 1.5040772927799362, ... ], ... ],
    	[ [ 1.7313393650040876, 1.028771585071517, 1.0345777581211701, 0.7672920067899494, 0.6539881604464655, 1.5118853271945245, 0.2823484125102194, 1.379032726613201, ... ], [ 1.7313393650040876, 1.318975390210463, 1.4627260882223139, 1.4899308240437277, 1.2460417741193326, 1.5118853271945245, 1.3826687518195762, 1.6343047770259516, ... ], [ 1.9250919243808737, 1.3511374212300769, 1.8693649054760921, 1.6904094716428277, 1.2460417741193326, 1.6369672733867993, 1.3826687518195762, 1.6343047770259516, ... ], [ 1.9640909936984061, 1.3511374212300769, 1.8693649054760921, 1.7364786475991, 1.0663621134286894, 1.6369672733867993, 1.261236531752147, 1.0942474406232139, ... ], [ 1.9640909936984061, 1.2154640159981622, 1.2341028332519408, 1.7364786475991, 1.8202493019881494, 1.2780891323334709, 1.261236531752147, 1.3864904871526345, ... ], [ 1.5497488366469154, 1.8602493019881494, 1.2671910349029438, 1.716, 1.8202493019881494, 1.4499382695034688, 1.8273867232182261, 1.3864904871526345, ... ], [ 1.5583994934542282, 1.8602493019881494, 1.2671910349029438, 1.716, 1.7573412263690231, 1.4499382695034688, 1.8273867232182261, 1.14, ... ], [ 1.6316871061503844, 1.6860255404720046, 1.0213948400418902, 1.5190227484246015, 1.588185710126683, 1.2329872297639977, 1.8596933616091131, 1.40046494668063, ... ], ... ],
    	...
    ]
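
The layer under test is a cudnn PoolingLayer (see the report metadata below), so each value in the output above is a windowed reduction of the regressed input. Below is a minimal sketch of naive 2-D max pooling over one channel, with assumed window and stride parameters; the test's actual "Asymmetric" configuration (presumably unequal window dimensions) is not spelled out in this section.

    // Naive single-channel 2-D max pooling (illustrative; parameters are assumptions).
    class PoolingSketch {
      static double[][] maxPool(double[][] in, int winH, int winW, int strideH, int strideW) {
        int outH = (in.length - winH) / strideH + 1;
        int outW = (in[0].length - winW) / strideW + 1;
        double[][] out = new double[outH][outW];
        for (int oy = 0; oy < outH; oy++) {
          for (int ox = 0; ox < outW; ox++) {
            double max = Double.NEGATIVE_INFINITY;
            for (int ky = 0; ky < winH; ky++)
              for (int kx = 0; kx < winW; kx++)
                max = Math.max(max, in[oy * strideH + ky][ox * strideW + kx]);
            out[oy][ox] = max; // each output cell is the max over one window
          }
        }
        return out;
      }
    }

Because pooling has no weights, the gradient reaching the input flows only through the selected window elements, which is why the network state printed earlier is empty while the input itself is regressed.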

TrainingTester.java:432 executed in 0.14 seconds (0.000 gc):

    return TestUtil.compare(title + " vs Iteration", runs);
Logging
Plotting range=[0.0, -1.0075461671903256], [2.0, -0.6368107846708009]; valueStats=DoubleSummaryStatistics{count=3, sum=0.559828, min=0.098277, average=0.186609, max=0.230775}
Only 1 points for GD
Only 1 points for CjGD
Only 1 points for LBFGS

Returns

Result

TrainingTester.java:435 executed in 0.01 seconds (0.000 gc):

    return TestUtil.compareTime(title + " vs Time", runs);
Logging
Plotting range=[-1.0, -1.0075461671903256], [1.0, -0.6368107846708009]; valueStats=DoubleSummaryStatistics{count=3, sum=0.559828, min=0.098277, average=0.186609, max=0.230775}
Only 1 points for GD
Only 1 points for CjGD
Only 1 points for LBFGS

Returns

Result

Results

TrainingTester.java:255 executed in 0.00 seconds (0.000 gc):

    return grid(inputLearning, modelLearning, completeLearning);

Returns

Result

TrainingTester.java:258 executed in 0.00 seconds (0.000 gc):

    return new ComponentResult(null == inputLearning ? null : inputLearning.value,
        null == modelLearning ? null : modelLearning.value, null == completeLearning ? null : completeLearning.value);

Returns

    {"input":{ "LBFGS": { "type": "NonConverged", "value": 0.23077524214427925 }, "CjGD": { "type": "NonConverged", "value": 0.09827743947114276 }, "GD": { "type": "NonConverged", "value": 0.23077524214427925 } }, "model":null, "complete":null}

LayerTests.java:425 executed in 0.00 seconds (0.000 gc):

    throwException(exceptions.addRef());
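
Note the addRef()/freeRef() pairs threaded through every snippet above: objects are reference-counted so that native and GPU resources can be released deterministically, and the try/finally blocks guarantee the matching freeRef() even when evaluation throws. A minimal sketch of the pattern (illustrative; not the library's actual base class):

    // Illustrative reference-counted handle mirroring the addRef()/freeRef() discipline.
    class RefCounted {
      private final java.util.concurrent.atomic.AtomicInteger refs =
          new java.util.concurrent.atomic.AtomicInteger(1); // creator holds one reference

      RefCounted addRef() {
        refs.incrementAndGet(); // a new owner now shares this object
        return this;
      }

      void freeRef() {
        if (refs.decrementAndGet() == 0) {
          // last owner released: free native/GPU resources here
        }
      }
    }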

Results

details: {"input":{ "LBFGS": { "type": "NonConverged", "value": 0.23077524214427925 }, "CjGD": { "type": "NonConverged", "value": 0.09827743947114276 }, "GD": { "type": "NonConverged", "value": 0.23077524214427925 } }, "model":null, "complete":null}
result: OK
  {
    "result": "OK",
    "performance": {
      "execution_time": "211.147",
      "gc_time": "19.814"
    },
    "created_on": 1586735230796,
    "file_name": "trainingTest",
    "report": {
      "simpleName": "Asymmetric",
      "canonicalName": "com.simiacryptus.mindseye.layers.cudnn.PoolingLayerTest.Asymmetric",
      "link": "https://github.com/SimiaCryptus/mindseye-cudnn/tree/59d5b3318556370acb2d83ee6ec123ce0fc6974f/src/test/java/com/simiacryptus/mindseye/layers/cudnn/PoolingLayerTest.java",
      "javaDoc": ""
    },
    "training_analysis": {
      "input": {
        "LBFGS": {
          "type": "NonConverged",
          "value": 0.23077524214427925
        },
        "CjGD": {
          "type": "NonConverged",
          "value": 0.09827743947114276
        },
        "GD": {
          "type": "NonConverged",
          "value": 0.23077524214427925
        }
      }
    },
    "archive": "s3://code.simiacrypt.us/tests/com/simiacryptus/mindseye/layers/cudnn/PoolingLayer/Asymmetric/trainingTest/202004124710",
    "id": "e4b67179-6a10-48b3-ab45-ac000011d9dc",
    "report_type": "Components",
    "display_name": "Comparative Training",
    "target": {
      "simpleName": "PoolingLayer",
      "canonicalName": "com.simiacryptus.mindseye.layers.cudnn.PoolingLayer",
      "link": "https://github.com/SimiaCryptus/mindseye-cudnn/tree/59d5b3318556370acb2d83ee6ec123ce0fc6974f/src/main/java/com/simiacryptus/mindseye/layers/cudnn/PoolingLayer.java",
      "javaDoc": ""
    }
  }