1. Test Modules
  2. Training Characteristics
    1. Input Learning
      1. Gradient Descent
      2. Conjugate Gradient Descent
      3. Limited-Memory BFGS
    2. Results
  3. Results

Subreport: Logs for com.simiacryptus.ref.lang.ReferenceCountingBase

Test Modules

Using Seed 1663580842404089856

Training Characteristics

Input Learning

In this test, we use a network to learn this target input, given its pre-evaluated output:

TrainingTester.java:332 executed in 0.06 seconds (0.000 gc):

    return RefArrays.stream(RefUtil.addRef(input_target)).flatMap(RefArrays::stream).map(x -> {
      try {
        return x.prettyPrint();
      } finally {
        x.freeRef();
      }
    }).reduce((a, b) -> a + "\n" + b).orElse("");

Returns

    [
    	[ [ 0.192, -1.456, 0.84, -0.632, 0.54, -1.344, -1.152, 0.776, ... ], [ -0.28, 1.104, -1.308, 0.688, -0.576, 1.352, -1.48, -0.216, ... ], [ -1.828, 0.004, -0.396, 0.08, 1.256, 1.112, -1.808, 1.456, ... ], [ -0.52, 1.148, 1.268, 1.952, -0.532, 0.936, -1.692, 1.032, ... ], [ -0.744, 0.216, 0.22, -1.316, -1.404, 0.928, -1.572, 0.008, ... ], [ 0.788, 1.332, 1.436, -0.972, -1.696, 0.164, -0.068, -1.172, ... ], [ -1.272, -1.224, 0.832, 1.496, 0.632, 0.84, -1.372, 0.84, ... ], [ 0.652, 0.136, 0.752, 1.504, -1.8, 1.28, 1.1, -0.496, ... ], ... ],
    	[ [ 1.468, 0.984, -1.384, -0.252, -0.22, -1.4, 0.584, -1.88, ... ], [ -1.26, 1.116, 0.22, 1.416, -0.952, -0.884, 0.82, 1.464, ... ], [ 0.072, -0.456, -1.724, 0.704, 0.996, -1.64, -0.576, 0.04, ... ], [ 0.928, 1.908, 1.432, 1.684, -1.964, 0.98, -1.824, 0.912, ... ], [ 0.92, -0.736, 1.332, 1.508, 0.632, 0.22, -0.688, -0.568, ... ], [ -1.86, -1.432, 0.456, 0.144, 0.204, 0.704, -1.308, 0.532, ... ], [ 1.008, -1.736, 1.612, 1.232, 1.944, -0.232, -0.992, -1.0, ... ], [ -0.736, 1.204, 0.22, -0.416, -1.54, -0.444, 1.764, -0.008, ... ], ... ],
    	[ [ -1.708, 0.164, -0.232, 0.464, 0.564, 0.988, 1.076, -0.392, ... ], [ -0.072, 0.232, 0.088, -0.852, 1.424, 1.384, 0.408, -1.948, ... ], [ -1.296, -1.936, 1.86, 1.148, -1.888, 0.18, 0.988, 1.984, ... ], [ 0.432, 1.98, 0.228, 0.424, -0.432, -1.524, -1.208, -1.228, ... ], [ 1.288, 1.668, -1.776, -0.228, 1.488, -0.556, -0.144, 1.972, ... ], [ -0.668, 1.708, 1.752, -1.376, 0.164, -1.968, -1.568, -0.112, ... ], [ 0.3, -1.4, -0.888, -1.572, -1.216, -0.172, 1.212, -0.268, ... ], [ -0.704, -1.048, 1.64, -1.072, -1.84, 1.72, 1.584, -1.932, ... ], ... ],
    	[ [ 0.68, -1.396, -1.776, -0.92, 1.54, -0.08, -1.976, -0.968, ... ], [ 1.408, -0.532, 1.96, 0.252, 1.016, -0.064, 0.268, 0.116, ... ], [ -0.86, 1.14, -1.196, 0.644, -1.88, 0.5, -1.712, 0.976, ... ], [ -1.784, -1.792, 1.152, -1.668, -1.808, -0.908, 1.804, 1.776, ... ], [ 1.804, -1.756, 0.12, 1.336, -0.776, 0.08, 1.42, 1.432, ... ], [ -1.352, 0.216, -1.52, 1.244, -0.42, 0.292, -1.748, -1.928, ... ], [ 0.164, 0.56, -0.532, 0.548, -1.532, 1.12, -1.52, 0.196, ... ], [ 1.756, -0.776, -0.044, 0.34, 1.012, -0.148, -1.424, -1.528, ... ], ... ],
    	[ [ 0.52, 1.724, -0.764, -0.36, 0.712, -0.832, 0.348, -0.912, ... ], [ -0.172, 0.336, 1.396, 1.056, 1.176, -1.472, 1.6, 0.188, ... ], [ -1.44, -1.216, 0.52, 0.916, -0.176, 1.124, 1.548, 1.088, ... ], [ -1.788, 1.968, -1.028, -1.104, 0.26, 0.464, 0.836, 1.844, ... ], [ -0.328, 1.076, 0.288, 0.14, 1.816, 0.132, -0.844, 0.112, ... ], [ -1.256, 0.6, -1.072, -0.408, 1.072, 1.528, 1.196, 0.016, ... ], [ 0.82, -1.86, -1.02, 0.228, 0.812, -1.532, 1.744, 0.056, ... ], [ 1.02, -1.284, -1.824, -1.94, -1.688, 1.816, -1.436, 0.132, ... ], ... ],
    	[ [ -1.08, -1.272, -0.964, 0.484, -0.788, 1.416, -1.772, -1.376, ... ], [ 1.68, -0.516, 0.604, 1.58, 1.76, 0.048, -1.488, 1.308, ... ], [ 0.78, -0.196, 1.892, 0.856, 1.7, 1.68, -0.436, -1.204, ... ], [ 0.416, -0.236, 0.924, 0.464, -0.148, -1.62, 0.52, -0.296, ... ], [ -1.436, 0.268, -1.828, -0.088, 0.704, -1.08, -1.208, 1.24, ... ], [ -1.7, 1.82, -0.076, -0.38, -0.696, -1.004, 1.828, 0.768, ... ], [ -0.38, 1.684, 1.856, 0.084, 0.344, -0.392, 1.488, 1.592, ... ], [ -0.16, 1.644, 1.988, 0.876, 0.148, -1.66, 0.364, -1.196, ... ], ... ],
    	[ [ 1.988, 1.924, 0.216, 1.176, 1.716, 0.884, 0.368, -0.512, ... ], [ 0.86, -1.652, -0.664, -0.04, 1.448, 0.276, -0.504, -0.632, ... ], [ 0.724, 0.06, 0.572, 0.98, 0.912, 1.844, -0.14, 1.696, ... ], [ 0.224, 1.772, -1.1, -1.344, -0.088, -0.416, 0.672, 0.328, ... ], [ -1.132, 1.208, -0.896, 0.764, -0.56, 1.324, 1.584, 0.24, ... ], [ -0.392, -1.936, 0.828, 0.016, -0.464, -0.804, 0.484, -0.372, ... ], [ 1.844, 1.124, -0.048, 1.816, 0.636, 1.544, 1.728, 0.532, ... ], [ -0.164, 1.856, 1.736, -0.592, -0.112, 0.628, 1.792, 0.78, ... ], ... ],
    	[ [ -0.12, 1.42, -1.132, -1.856, -0.928, 0.856, 0.192, 0.332, ... ], [ 1.156, 1.456, -1.176, -1.456, 1.768, -1.696, -0.068, 1.9, ... ], [ 0.908, -1.98, -1.18, 1.884, 0.656, 1.448, -1.628, -1.12, ... ], [ -0.096, -0.292, -1.66, 1.688, -1.232, -1.224, 0.908, -0.2, ... ], [ 1.956, -1.096, 1.972, -0.576, -1.572, -1.636, -1.252, -0.096, ... ], [ -0.636, 0.588, 0.868, 1.74, -1.748, 1.132, 0.044, -1.852, ... ], [ 0.668, 1.188, -0.044, -0.444, 0.264, -0.124, 0.528, -1.78, ... ], [ 1.64, 1.208, -1.252, 0.596, 1.704, 1.336, -1.716, -0.896, ... ], ... ],
    	...
    ]
    [
    	[ [ -1.148, 1.368, 1.936, -1.188, 1.796, -1.612, 0.3, 0.132, ... ], [ 0.836, 1.568, 1.78, -0.88, -1.116, 0.94, -0.292, -1.028, ... ], [ 0.288, -1.024, -0.232, 0.24, -0.012, -0.744, -0.336, -0.912, ... ], [ -1.444, -0.736, -0.664, -1.424, -1.876, 0.548, 0.984, -1.528, ... ], [ 1.232, 0.384, 0.2, -0.092, 0.244, 0.18, -1.8, 1.188, ... ], [ -0.464, 0.684, 1.364, 0.912, 0.596, -0.056, 1.992, 0.196, ... ], [ -1.208, -1.984, 0.312, -0.8, 1.944, -1.32, -1.268, 0.416, ... ], [ -0.976, 1.156, 1.068, 1.06, 0.568, -0.848, 1.876, -1.328, ... ], ... ],
    	[ [ -0.148, -0.008, 1.236, 1.56, 0.324, -1.196, -1.212, 1.816, ... ], [ 0.48, 0.508, 1.652, -1.032, -1.524, 0.744, 0.332, -0.356, ... ], [ 1.788, 1.504, -0.512, 0.048, -1.04, -1.404, -0.788, 1.232, ... ], [ 1.416, -0.792, 1.768, -1.048, 1.584, 1.232, -1.216, -0.752, ... ], [ -0.652, 1.472, -1.228, 0.472, -1.344, -0.464, 0.464, 1.816, ... ], [ 0.584, -0.504, -1.816, -0.1, -0.576, 1.412, 1.104, -1.984, ... ], [ 0.208, -1.04, 0.192, -0.3, 0.42, -0.064, 0.156, -1.276, ... ], [ -1.72, -0.072, -1.164, -1.4, 1.192, -0.068, -0.208, -0.076, ... ], ... ],
    	[ [ 1.308, -1.928, 1.504, -0.16, -1.012, 0.012, -1.572, 0.448, ... ], [ 1.152, 1.54, -1.244, -0.404, 1.64, -0.388, -0.088, -1.18, ... ], [ 0.348, 0.12, 1.14, 0.492, 0.788, 0.38, -1.3, -0.704, ... ], [ 0.14, -0.552, 0.36, -0.036, 0.552, -1.496, 0.704, 0.468, ... ], [ 1.52, 0.192, -1.06, -1.476, -1.904, 0.552, 0.288, 0.28, ... ], [ 0.772, 0.696, 0.38, 0.276, 1.884, -0.892, 1.56, 0.412, ... ], [ 0.18, -0.308, -1.988, 0.716, -1.836, 0.112, -1.22, -0.892, ... ], [ 1.996, 1.34, -1.144, 1.168, 1.348, -0.524, 0.148, -0.532, ... ], ... ],
    	[ [ 0.6, 1.852, 0.512, -0.208, 0.796, -0.868, 1.0, -1.48, ... ], [ -0.148, 1.044, 1.54, 1.072, -0.052, 0.724, -0.104, -0.728, ... ], [ -0.692, -1.376, 1.864, -1.688, 0.296, -0.728, 1.56, -0.576, ... ], [ -0.04, 0.744, 0.216, 1.916, -1.352, -1.812, -1.688, 1.892, ... ], [ 0.288, 0.228, 0.052, 1.832, -1.368, 1.424, 0.992, -0.396, ... ], [ 1.144, 1.636, 1.08, -1.232, 0.876, 0.14, 0.576, -1.616, ... ], [ -1.116, -1.396, 1.452, -0.368, 1.2, 1.584, 1.36, -1.608, ... ], [ -1.908, -0.16, 0.384, -1.916, -0.396, -1.856, -1.688, -1.896, ... ], ... ],
    	[ [ -0.512, 1.908, 0.396, -0.012, -1.012, -1.684, -0.34, 0.436, ... ], [ -0.952, -1.656, 0.228, 0.456, -0.94, -1.224, -1.336, -1.596, ... ], [ 0.14, -1.464, -0.64, -1.276, 0.02, 1.996, -0.384, 1.5, ... ], [ -1.86, -1.156, -1.5, -0.544, -0.632, -1.892, 0.856, -0.888, ... ], [ -0.06, -0.42, -1.2, -0.624, -0.744, -0.196, -1.332, 1.132, ... ], [ 1.668, -1.712, 1.344, -1.464, -1.24, -1.06, -1.464, -1.98, ... ], [ -1.696, 1.588, 0.88, -0.664, 0.004, 0.956, -1.74, 0.34, ... ], [ 1.256, -0.072, -0.776, 1.204, 0.416, -1.94, -1.892, 0.8, ... ], ... ],
    	[ [ 0.576, 0.8, -0.152, -1.116, 1.752, -0.892, 1.2, 1.796, ... ], [ 0.352, 0.284, -0.132, 1.984, 0.432, -1.488, 1.484, 1.196, ... ], [ -1.68, 1.26, 0.628, -0.532, -1.156, -1.172, 1.684, -0.496, ... ], [ 0.26, -1.22, -0.552, -0.792, 0.588, 1.62, -0.76, 0.024, ... ], [ 0.628, 0.352, -0.24, 1.136, -0.352, -1.404, -0.064, -0.636, ... ], [ -0.164, -0.808, -1.916, -0.188, -0.98, -1.988, 1.928, 0.812, ... ], [ 1.22, 1.592, -0.876, 0.144, -0.168, -0.864, -0.152, -1.96, ... ], [ -1.284, -1.056, -1.788, 1.204, -1.492, -1.808, -1.448, -1.396, ... ], ... ],
    	[ [ -0.788, 1.684, -0.844, 1.272, 1.688, 1.992, 1.932, 1.42, ... ], [ 0.38, 1.976, 1.564, 1.704, 0.008, -1.616, 0.896, -0.388, ... ], [ -1.672, 1.7, -0.048, -1.772, -1.124, -0.184, -1.38, -1.592, ... ], [ -0.52, -0.32, -1.484, 0.368, -0.908, -1.664, 0.648, -1.452, ... ], [ -0.34, -0.576, -1.092, -1.992, 1.764, -0.816, -1.796, -0.32, ... ], [ 0.296, -0.98, -0.952, 1.684, -1.044, -0.444, 2.0, 1.596, ... ], [ -1.7, 1.232, -0.376, -0.968, 0.464, 0.944, -1.844, 1.784, ... ], [ -0.168, -0.108, -0.14, -1.536, 0.624, 0.24, -0.092, 1.016, ..

...skipping 5531 bytes...

    6, 0.432, 1.224, -1.156, 1.28, -1.672, -1.956, ... ], [ -0.648, 1.328, 0.692, 0.692, -1.456, -1.644, -1.44, 0.616, ... ], [ 1.524, 0.572, -1.96, 0.48, 0.34, -0.376, 0.444, -0.108, ... ], [ -0.84, 0.976, 1.12, -0.344, 1.568, 0.528, -1.684, 1.272, ... ], [ -0.264, 1.212, 1.092, -1.152, -1.832, 1.512, 1.456, 1.396, ... ], [ 0.952, -1.864, -0.312, 0.308, 0.224, -0.248, 0.508, -0.876, ... ], [ -0.516, 0.936, -0.152, 1.416, -1.888, -0.964, 1.052, 0.012, ... ], [ 0.332, 1.748, -0.196, -0.184, 0.288, -1.788, 1.392, -0.18, ... ], ... ],
    	[ [ -1.596, -0.656, 0.676, -0.984, 0.344, -1.604, -1.596, 0.86, ... ], [ 1.788, -1.552, -0.744, -1.132, -0.796, -0.208, -1.032, 1.12, ... ], [ 1.352, 1.312, -1.696, 0.376, 1.764, 1.208, -1.588, 1.704, ... ], [ -1.58, -1.044, -1.168, -0.38, 0.944, -1.192, -0.676, 1.556, ... ], [ -0.516, -0.44, -0.892, 0.976, 1.94, -1.236, -1.868, 0.964, ... ], [ -0.948, 0.732, 1.804, -0.988, 1.516, -1.96, 1.864, -1.448, ... ], [ -0.672, 1.652, 0.772, -0.264, 1.644, -0.28, 0.364, -1.42, ... ], [ -1.512, 1.168, -1.668, 1.688, -0.3, -0.12, -0.344, 1.064, ... ], ... ],
    	[ [ 0.192, 1.2, -1.876, -1.584, 1.504, 0.056, -0.712, -1.856, ... ], [ 1.628, 1.484, 0.108, -1.656, -0.504, 1.048, -0.86, -0.936, ... ], [ -0.612, 1.768, -1.112, 0.52, 0.372, -0.208, 1.096, 1.992, ... ], [ 0.084, 0.396, -0.68, -0.792, -0.992, 0.752, -0.948, -1.42, ... ], [ -1.884, 1.228, 0.956, 1.856, -1.588, 0.152, -1.668, -0.608, ... ], [ 1.676, 0.8, 1.344, 0.632, 0.316, 1.596, -0.792, -1.244, ... ], [ 1.832, -1.852, -1.972, 0.988, 0.976, 1.88, 0.604, -1.956, ... ], [ 0.212, 0.908, -1.104, -0.024, 0.28, -1.608, 1.036, 1.216, ... ], ... ],
    	[ [ -0.42, -0.196, 1.504, 0.552, -1.704, 1.448, 1.816, 0.668, ... ], [ 1.4, 0.408, -0.424, -0.064, 0.1, 0.76, -1.832, -0.108, ... ], [ 0.292, 1.264, -0.98, -0.232, 0.704, 1.716, 0.884, -0.928, ... ], [ 0.288, -0.86, -1.356, 1.444, -0.56, 1.372, 1.372, -1.168, ... ], [ -1.288, 0.08, 1.452, 1.2, -0.84, -0.324, 1.824, -1.0, ... ], [ -0.564, 1.248, -0.752, -0.06, 0.944, -1.016, 1.2, -0.512, ... ], [ 1.264, 0.584, 1.48, 0.716, 0.752, 1.92, -0.812, 0.248, ... ], [ 1.404, 1.108, 0.376, -1.592, -1.46, 1.256, -0.132, 0.044, ... ], ... ],
    	[ [ 1.432, -0.736, -1.396, 1.064, 0.392, -0.332, -0.508, -1.144, ... ], [ 0.004, 0.772, -1.336, -1.088, -0.392, -1.204, 1.488, 1.144, ... ], [ -0.756, -0.68, 0.188, -0.344, 0.392, -0.584, -0.176, 0.136, ... ], [ 1.82, 0.64, 0.956, -1.904, 0.068, -1.224, -1.724, 0.884, ... ], [ 1.548, -1.372, 1.436, -1.844, -1.628, -1.092, -0.164, -0.6, ... ], [ 1.14, 0.64, -1.364, -0.748, 1.392, -1.924, -0.68, 1.34, ... ], [ -0.4, 0.028, -1.372, 1.316, -0.352, 0.108, -0.868, -0.912, ... ], [ 1.116, 0.46, -0.08, 1.248, -0.856, -1.804, -0.06, 0.568, ... ], ... ],
    	[ [ -1.952, 0.62, 0.368, -1.796, -0.712, -0.388, 0.26, 0.38, ... ], [ 1.824, 0.144, -1.416, -0.592, 0.884, 0.016, -1.1, 1.052, ... ], [ 1.396, -1.572, 0.904, 0.996, -1.66, -1.044, 1.332, -0.592, ... ], [ 0.352, -0.98, 1.88, 0.88, 1.304, -0.652, 0.716, -1.772, ... ], [ 0.536, -1.028, 1.836, -1.84, 0.112, -0.856, 0.108, -1.796, ... ], [ 1.26, -0.016, 1.604, -1.24, 1.688, -1.108, 1.812, 0.268, ... ], [ 0.168, 0.912, -1.536, -1.336, 0.784, -0.44, 1.072, 0.972, ... ], [ 0.272, -0.384, -0.036, -0.424, -1.844, -0.972, -1.144, -0.216, ... ], ... ],
    	[ [ -1.82, -0.904, 1.176, 1.796, -0.82, 1.496, 0.768, 0.764, ... ], [ -1.548, 1.588, -1.092, -1.084, -1.096, -0.548, -0.848, -0.544, ... ], [ 0.6, 0.332, 1.72, 1.032, 0.544, 0.368, 1.492, -0.848, ... ], [ -1.816, 1.452, -1.728, 0.392, -0.824, -0.744, -0.292, 1.312, ... ], [ 0.108, 0.4, 0.188, -0.3, -0.608, -0.504, -0.06, -1.184, ... ], [ 1.708, -0.096, 1.24, -1.9, 0.684, -0.972, 0.668, 1.448, ... ], [ -0.884, -0.696, -1.192, -0.612, 1.156, -0.348, -0.732, -1.136, ... ], [ 0.324, -0.556, 1.44, -1.092, 0.568, 0.66, 0.38, 1.592, ... ], ... ],
    	...
    ]
    [
    	[ [ -1.684, 1.34, 0.308, 0.88, -0.124, -0.156, -0.352, -1.584, ... ], [ 0.808, 0.512, -1.588, 1.56, 1.692, -1.676, 0.792, 1.132, ... ], [ -0.86, 1.792, -1.044, 1.36, -0.384, -0.104, 0.052, -1.052, ... ], [ -1.424, 1.92, -1.296, 1.504, -0.676, 0.964, -0.256, 0.716, ... ], [ 1.308, 0.472, -0.344, 1.6, -0.184, -0.672, -0.784, -1.86, ... ], [ 1.208, 0.928, 1.856, -0.08, -1.916, 1.564, -1.472, -0.168, ... ], [ -1.424, 0.14, 1.144, 0.124, -0.908, -0.804, 0.076, 1.04, ... ], [ 1.656, 1.984, -0.296, -1.08, 1.196, -0.144, 1.552, -0.688, ... ], ... ],
    	[ [ -0.736, 0.028, 1.188, 1.616, 1.276, 1.184, 1.424, 0.2, ... ], [ -0.492, 0.964, 1.532, 1.332, -0.2, -0.584, 0.54, 1.052, ... ], [ 1.24, -1.34, -1.74, 0.984, 1.7, 0.664, 1.364, -0.616, ... ], [ -1.884, -0.244, -0.792, -0.664, 1.336, 1.972, 1.264, 1.604, ... ], [ -1.028, 0.76, 1.508, 0.736, 0.492, 0.96, -0.896, 1.444, ... ], [ -1.056, 0.26, 1.836, 1.752, 1.836, -1.984, 1.1, 0.508, ... ], [ 1.796, -0.632, 0.848, -0.204, 0.608, -0.668, 1.424, 1.752, ... ], [ -1.416, 0.336, -0.428, -1.624, -1.572, 0.476, -0.988, -1.204, ... ], ... ],
    	[ [ -0.364, 1.448, -1.608, -0.256, -0.3, -1.076, -1.696, -1.276, ... ], [ 0.64, 0.744, -0.664, 0.18, 0.544, 1.756, 1.156, -1.88, ... ], [ 1.672, 1.132, -1.828, 1.604, -1.308, 0.32, 0.52, 1.692, ... ], [ 1.568, 1.716, -1.052, -0.76, 0.02, 1.18, -1.776, -0.288, ... ], [ -0.812, 1.564, 1.908, -1.996, -1.956, 1.164, 0.416, 0.176, ... ], [ -0.18, 1.952, -1.38, 1.276, -0.616, -1.624, -0.264, -1.248, ... ], [ -0.48, 1.536, -1.764, -1.876, 0.44, -1.86, -1.964, -0.044, ... ], [ -1.58, 0.108, 0.328, -1.86, 0.96, 0.18, -1.772, 1.012, ... ], ... ],
    	[ [ 0.876, -0.776, 0.984, -0.4, 0.0, -1.968, -0.816, 0.316, ... ], [ -0.632, -0.24, -0.988, -1.34, -0.112, 0.284, 1.852, 0.508, ... ], [ -0.028, 1.216, -1.492, -0.6, -1.312, -0.192, -1.78, 0.22, ... ], [ -0.688, 0.264, 0.684, -0.02, 1.816, -0.108, -1.664, 1.168, ... ], [ -1.28, -1.64, 1.46, -1.856, 0.36, -0.716, 0.736, -0.628, ... ], [ -0.4, -0.624, 0.372, 0.016, 1.712, -0.428, -0.22, 1.224, ... ], [ -1.064, 1.244, 1.652, 1.276, 1.904, -1.428, 1.42, -0.444, ... ], [ -1.944, 0.268, -1.24, -0.476, 0.928, -0.652, 0.52, -0.092, ... ], ... ],
    	[ [ -1.32, 1.344, -0.648, 1.64, -1.532, 0.716, 0.168, 1.2, ... ], [ 1.9, -1.224, 1.716, 1.288, 1.344, 1.984, 1.136, 1.78, ... ], [ -1.716, 1.98, 0.296, -1.424, 1.152, 1.668, -1.504, -0.844, ... ], [ 1.28, 1.132, 0.288, 1.728, 1.848, -1.552, -0.004, 0.24, ... ], [ -0.964, -0.404, 0.028, -0.228, 0.016, -1.12, 1.64, 1.5, ... ], [ 1.412, 0.836, 1.848, -1.064, 1.52, -1.336, -1.16, -0.712, ... ], [ -1.796, 1.592, 0.788, 1.608, -0.568, -0.704, -0.072, -1.032, ... ], [ 0.712, -0.596, 0.636, -1.808, 0.276, -0.488, 0.3, 0.484, ... ], ... ],
    	[ [ -1.24, 0.908, -1.208, -1.644, 0.044, -0.092, 1.012, 0.548, ... ], [ 0.644, 1.04, 0.264, -0.152, 1.556, 1.128, -0.112, 0.632, ... ], [ 1.592, -0.884, 1.44, -0.26, -1.336, 1.008, 0.6, 1.524, ... ], [ 0.732, 0.208, -1.32, -1.848, 0.688, 1.796, 1.028, -1.136, ... ], [ -1.856, -1.468, -0.96, 0.816, -0.592, 1.648, -1.348, 0.78, ... ], [ 1.968, 1.692, 1.54, 0.916, -0.528, 1.176, -0.816, 0.956, ... ], [ -1.3, -0.604, 0.168, -1.868, -1.172, 1.836, 0.916, -0.728, ... ], [ 0.128, -1.324, 0.648, 0.076, 1.092, -1.832, 1.864, 0.956, ... ], ... ],
    	[ [ -1.516, 1.192, -1.36, 1.14, -0.104, -0.176, -0.06, 1.848, ... ], [ 0.14, 0.896, 0.888, 0.676, -1.2, 0.172, 1.004, -1.488, ... ], [ 1.896, 1.26, -0.132, 0.128, 0.368, 0.716, -1.384, -0.268, ... ], [ 1.092, 1.516, -1.744, -1.484, 1.928, -1.188, 1.464, -0.216, ... ], [ 0.888, 1.816, -0.492, 1.424, 0.144, 1.36, 1.232, 0.864, ... ], [ -0.44, -0.808, 0.756, -1.228, 0.472, -0.256, 0.652, -1.664, ... ], [ -1.356, 1.748, -1.216, 1.228, 0.6, 0.288, -0.084, -1.12, ... ], [ 0.348, -0.068, 0.32, -0.896, 1.908, 0.884, -0.376, 0.232, ... ], ... ],
    	[ [ -1.604, 0.56, 1.416, 1.732, 0.44, 1.6, -1.956, 0.772, ... ], [ -0.088, -0.424, 1.624, -1.636, 1.68, -0.588, 1.52, -0.808, ... ], [ -1.992, 1.896, 1.652, 0.332, 0.652, 1.156, -0.02, 1.34, ... ], [ -0.992, -1.472, -0.62, -0.108, -1.892, 0.328, -0.596, 0.524, ... ], [ 1.496, -1.324, -1.644, 1.688, -1.42, 1.108, -0.644, 1.992, ... ], [ -1.864, -0.364, -0.984, -0.436, 0.516, -0.376, 1.796, -0.496, ... ], [ 1.82, -0.964, -1.764, -0.728, -1.556, -0.132, -0.296, -1.208, ... ], [ -1.168, -0.612, 1.232, 0.78, -1.064, 1.696, -0.028, 1.224, ... ], ... ],
    	...
    ]

Gradient Descent

First, we train using the basic gradient descent method, applying weak line search conditions.

TrainingTester.java:480 executed in 42.37 seconds (5.341 gc):

    IterativeTrainer iterativeTrainer = new IterativeTrainer(trainable.addRef());
    try {
      iterativeTrainer.setLineSearchFactory(label -> new ArmijoWolfeSearch());
      iterativeTrainer.setOrientation(new GradientDescent());
      iterativeTrainer.setMonitor(TrainingTester.getMonitor(history));
      iterativeTrainer.setTimeout(30, TimeUnit.SECONDS);
      iterativeTrainer.setMaxIterations(250);
      iterativeTrainer.setTerminateThreshold(0);
      return iterativeTrainer.run();
    } finally {
      iterativeTrainer.freeRef();
    }
Logging
Reset training subject: 1295340621666
Reset training subject: 1297309418095
Constructing line search parameters: GD
th(0)=0.8537718530625;dx=-2.668037040820336E-7
New Minimum: 0.8537718530625 > 0.8537712787085724
WOLFE (weak): th(2.154434690031884)=0.8537712787085724; dx=-2.66591432808354E-7 evalInputDelta=5.743539276403808E-7
New Minimum: 0.8537712787085724 > 0.8537707043548384
WOLFE (weak): th(4.308869380063768)=0.8537707043548384; dx=-2.6659134306553934E-7 evalInputDelta=1.1487076616578662E-6
New Minimum: 0.8537707043548384 > 0.8537684069418348
WOLFE (weak): th(12.926608140191302)=0.8537684069418348; dx=-2.665909840942734E-7 evalInputDelta=3.446120665273611E-6
New Minimum: 0.8537684069418348 > 0.8537580686216014
WOLFE (weak): th(51.70643256076521)=0.8537580686216014; dx=-2.66589368723579E-7 evalInputDelta=1.3784440898612083E-5
New Minimum: 0.8537580686216014 > 0.853702931971674
WOLFE (weak): th(258.53216280382605)=0.853702931971674; dx=-2.6658075341321716E-7 evalInputDelta=6.892109082601294E-5
New Minimum: 0.853702931971674 > 0.8533583682800717
WOLFE (weak): th(1551.1929768229563)=0.8533583682800717; dx=-2.665269077234429E-7 evalInputDelta=4.1348478242830833E-4
New Minimum: 0.8533583682800717 > 0.8508796239671763
WOLFE (weak): th(10858.350837760694)=0.8508796239671763; dx=-2.6609038724353293E-7 evalInputDelta=0.0028922290953237173
New Minimum: 0.8508796239671763 > 0.8308515636862905
WOLFE (weak): th(86866.80670208555)=0.8308515636862905; dx=-2.609236484490494E-7 evalInputDelta=0.02292028937620949
New Minimum: 0.8308515636862905 > 0.6651152533297944
END: th(781801.26031877)=0.6651152533297944; dx=-2.1697977546069376E-7 evalInputDelta=0.18865659973270565
Fitness changed from 0.8537718530625 to 0.6651152533297944
Iteration 1 complete. Error: 0.6651152533297944 Total: 42.3584; Orientation: 3.1843; Line Search: 34.2920
Final threshold in iteration 1: 0.6651152533297944 (> 0.0) after 42.360s (< 30.000s)

Returns

    0.6651152533297944

This training run resulted in the following configuration:

TrainingTester.java:610 executed in 0.00 seconds (0.000 gc):

    RefList<double[]> state = network.state();
    assert state != null;
    String description = state.stream().map(RefArrays::toString).reduce((a, b) -> a + "\n" + b)
        .orElse("");
    state.freeRef();
    return description;

Returns

    

And regressed input:

TrainingTester.java:622 executed in 0.01 seconds (0.000 gc):

    return RefArrays.stream(RefUtil.addRef(data)).flatMap(x -> {
      return RefArrays.stream(x);
    }).limit(1).map(x -> {
      String temp_18_0015 = x.prettyPrint();
      x.freeRef();
      return temp_18_0015;
    }).reduce((a, b) -> a + "\n" + b).orElse("");

Returns

    [
    	[ [ 1.612, -0.136, -1.268, -0.688, -1.768, 1.594284820648753, 1.8558553926287267, -0.324, ... ], [ -1.116, 0.304, 1.06, -1.076, 0.652, -0.4, 0.832, 1.55529572951982, ... ], [ -0.644, -0.19882452738046125, 1.196, 0.868, -1.96, 1.448, -0.932, -1.232, ... ], [ 1.1737961948610538, -0.804, 1.7515350533193699, 1.0320199563771986, -1.288, 1.8634340814323644, -0.06685073921638852, -1.368, ... ], [ 1.070989091128933, 1.6093985627717606, 1.86, -1.496, 0.32, 0.9560909936984061, -0.108, 1.4309117983489967, ... ], [ -0.44, 1.296, -0.752, -1.548, 0.008, -0.096, 0.732, 0.552, ... ], [ 1.0, 0.3402019437740109, 1.136, -0.18, 0.88, 0.9253512045576223, 0.548, -0.824, ... ], [ -1.196, -0.86, 1.272, -1.532, -0.644, -0.36, -0.204, 0.6047379277758484, ... ], ... ],
    	[ [ 1.7910464275316709, 0.136, 1.6967753078013876, 0.332, 1.2668544619462592, -0.596, -1.408, -0.928, ... ], [ -0.104, 0.4137251575398463, 1.496, 1.9005696412975062, 1.14, 1.152, 1.38, -1.016, ... ], [ -0.168, -0.74, -0.168, 0.372, 0.5818161512382524, 1.952, -0.176, 1.152, ... ], [ 0.74, -1.816, 1.428, 0.328, -0.768, 0.856, -1.168, 1.3752383931170824, ... ], [ 0.584, -1.456, 1.818739789140784, -0.792, 1.304, 0.136, -0.492, -0.208, ... ], [ 0.532, -1.48, 0.976, 0.45458773630976934, 1.7837307416346522, 0.144, 1.0029691347517344, -0.004, ... ], [ -0.708, -1.74, 0.028, -1.652, -0.968, -1.604, 0.52, 0.396, ... ], [ 1.127386723218226, -1.4, 1.7278753490059253, 0.699895305383124, 1.157452176444628, 0.18, 1.3742374624346148, -1.784, ... ], ... ],
    	[ [ 1.056, 1.19060515995811, 0.1, 0.08477158507151698, -1.62, -0.872, 1.6027061318451152, 1.748, ... ], [ -0.812, 0.332, 1.8757270189047817, -0.808, 0.964, 0.852, -0.236, 1.6294022855016315, ... ], [ -1.128, 0.5192683276828802, 1.5580218177421339, -0.852, 1.768, -0.492, 1.38, 1.80140854096036, ... ], [ 0.46711374212300766, -1.172, -1.2, -1.468, -0.36, -1.668, 1.9023184779444215, -1.444, ... ], [ 0.6944057489129571, 1.32, 1.93459145903964, -0.416, -0.612, 0.5834440596209638, -0.384, -0.692, ... ], [ 0.228, 1.6061701478432777, -0.972, 1.550193826950347, 0.884, 0.04, 1.0723739529822238, -0.96, ... ], [ -1.832, -0.07555873242643914, 1.7558753490059253, 0.7, 1.44, 1.8534322200674294, 0.86, 0.9965933204045754, ... ], [ 1.0, -0.448, 0.712, -1.252, 1.4438990281129946, 1.436, 1.429499534658766, 0.672, ... ], ... ],
    	[ [ -0.42, -1.52, 1.48, -1.26, 1.252067314591337, 1.640330317497956, -1.8, 1.592, ... ], [ 1.3553293868154885, 0.892, 0.672, -1.824, 0.216, 0.272, -0.444, 1.736, ... ], [ -2.0, -1.04, -0.488, -1.92, -1.34, -0.2760137009184699, -1.78, 1.268, ... ], [ 0.012, -0.152, 0.26, 1.2638753490059254, 1.6958927726542659, -1.352, -0.476, -1.808, ... ], [ -0.204, 0.312, -1.968, -1.14, -1.584, -0.66, -1.212, -0.05055154628524283, ... ], [ 0.12, 1.404, -1.584, -0.272, 0.9964076102778925, 0.484, -1.628, -0.74, ... ], [ 1.2292938681548846, -1.812, 1.396, 0.9623421570514907, 0.648, -0.82, -0.004, -1.292, ... ], [ 1.044, -0.756, -0.992, 0.544, -1.696, 1.836, 0.016, 0.196, ... ], ... ],
    	[ [ -1.076, -1.564, 1.4311137421230076, 0.744, -1.616, -1.144, -0.524, -1.288, ... ], [ 0.588, 0.212, -0.12, 0.208, -1.424, -1.848, 1.3858061730496531, 1.6170009306824675, ... ], [ 0.0742137833275455, 0.26, -0.972, -0.116, 0.068, 0.7494858337402963, 1.7692165753749485, -1.164, ... ], [ -1.316, 0.428, -0.124, -0.656, 1.0679526417858616, -1.364, 1.056, -0.24175628210665662, ... ], [ 1.0941065559818113, 0.5418597867225202, 0.652, -0.372, -1.552, 0.272, -0.072, -0.848, ... ], [ -0.896, -0.132, -0.128, -0.708, -1.12, -1.62, 1.6629654120218638, -1.732, ... ], [ -0.552, -1.644, 0.448, 1.7573549272874929, 0.67505640572027, -0.708, 0.304, 0.68, ... ], [ 0.8268744183234578, 0.5076896388792425, 1.128, -1.092, -0.844, -0.464, 1.62, 0.452, ... ], ... ],
    	[ [ -0.728, 1.1270663839088693, 1.232, -0.38, -1.348, -0.8137226248109883, 1.332, -0.548, ... ], [ 1.0830663839088692, -0.388, 0.944, 1.3306924309266455, -0.30117666262055126, -1.264, -0.86, -0.336, ... ], [ -1.96, 0.952, 0.112, -1.864, -1.892, 0.536, -0.844, -1.864, ... ], [ -1.576, 1.64846494668063, 1.8077270189047816, 1.4497288802697168, 0.468, -0.836, 0.092, -0.936, ... ], [ 0.288, -1.108, 0.681273911777686, -0.2532739117776861, -0.504, 1.6122729810952185, -0.508, -1.3, ... ], [ 0.212, -1.828, -0.216, -1.432, 0.16916549443093948, -0.592, -0.084, -0.19264067861871365, ... ], [ -0.508, 0.208, 1.7527379277758484, 0.448, -1.216, -1.564, 1.9546824527380462, -1.132, ... ], [ -0.52, -0.472, -1.632, -0.268, -1.216, 0.9100654532264016, -0.256, 1.7008526005813238, ... ], ... ],
    	[ [ -0.528, -0.16, 0.424, -0.904, 0.948, 1.702150191466079, -0.096, 1.5277943334961184, ... ], [ -1.912, -1.336, -0.536, 1.056, 1.2026687518195764, 1.152, -1.172, 0.348, ... ], [ 0.06, -1.224, -0.11974071982325146, 1.9577388584583162, 0.9611592389722108, 0.06, 0.9431137421230077, -0.824, ... ], [ 0.8904431289384962, -0.92, -1.812, -0.72, 0.252, -1.228, 0.772, -0.552, ... ], [ -1.496, 0.204, 0.708, 1.252, 1.6252676563189574, -0.588, -1.308, -0.796, ... ], [ -0.376, 0.876, 1.928, 0.404, 0.948, 1.8155923897221076, 1.8052165753749485, -1.204, ... ], [ -1.532, -1.216, -0.4, 0.064, -1.528, 1.9407852859899868, -1.852, 0.264, ... ], [ -1.072, 1.8384431289384962, -1.12, 1.144, 0.06, -1.076, 0.196, 1.6859345467735984, ... ], ... ],
    	[ [ -0.168, 1.7133175472619537, -1.684, 1.2251592389722108, -1.104, -0.616, 0.8876833834205138, -0.248, ... ], [ 1.7281583082897432, 1.22, 0.5530919243808737, 1.0, -1.48, 0.68, 0.784, 1.46, ... ], [ 0.1, 1.608, -0.78, -0.14, -1.684, -1.132, -1.248, -1.572, ... ], [ -0.616, 1.9019208458551284, -1.108, -0.636, -1.724, 1.327827990791787, -1.524, -0.038618860876579786, ... ], [ 0.656, 1.495932685408663, -1.112, 1.7435113742123007, -1.464, 0.724, 1.044, -0.644, ... ], [ 1.9349317547261953, -1.512, 1.9368862578769923, -1.192, 0.152, -1.868, 0.768, -0.03037581434715919, ... ], [ 1.2259981386350647, -1.856, 0.4648825351471217, 1.8897388584583161, -0.844, 0.936, -1.004, -1.608, ... ], [ 0.828, 1.7, 0.1, 0.576, 1.4406469340774422, -1.168, 0.9352246921986124, -0.068, ... ], ... ],
    	...
    ]

To produce the following output:

TrainingTester.java:633 executed in 0.10 seconds (0.000 gc):

    Result[] array = ConstantResult.batchResultArray(pop(RefUtil.addRef(data)));
    @Nullable
    Result eval = layer.eval(array);
    assert eval != null;
    TensorList tensorList = Result.getData(eval);
    String temp_18_0016 = tensorList.stream().limit(1).map(x -> {
      String temp_18_0017 = x.prettyPrint();
      x.freeRef();
      return temp_18_0017;
    }).reduce((a, b) -> a + "\n" + b).orElse("");
    tensorList.freeRef();
    return temp_18_0016;

Returns

    [
    	[ [ 1.7910464275316709, 0.4137251575398463, 1.6967753078013876, 1.9005696412975062, 1.2668544619462592, 1.594284820648753, 1.8558553926287267, 1.55529572951982, ... ], [ 1.1737961948610538, -0.19882452738046125, 1.7515350533193699, 1.0320199563771986, 0.5818161512382524, 1.952, -0.06685073921638852, 1.3752383931170824, ... ], [ 1.070989091128933, 1.6093985627717606, 1.86, 0.45458773630976934, 1.7837307416346522, 0.9560909936984061, 1.0029691347517344, 1.4309117983489967, ... ], [ 1.127386723218226, 0.3402019437740109, 1.7278753490059253, 0.699895305383124, 1.157452176444628, 0.9253512045576223, 1.3742374624346148, 0.6047379277758484, ... ], [ 1.8455232137658355, 1.7634203805138946, 0.9347597455179824, 1.3402829592838177, 1.0547834246250516, 1.8679227072200637, 1.3210445661667354, 1.50801370091847, ... ], [ 1.8608862578769925, 1.809010908871067, 1.6315687106150383, 1.5302374624346147, 1.4426251163353083, 1.5911037639344083, 0.9700317959307332, 1.2873430877339582, ... ], [ 1.825445920985899, 1.6322293456109507, 0.5770981798396025, 1.4632047358214137, 1.537354927287493, 1.0185515462852428, 0.9026487954423776, 1.588, ... ], [ 1.9147971255435214, 0.7773512045576223, 1.6891928962678793, 1.458045496849203, 1.1285459621904368, 1.3593194086268892, 0.8866388172537784, 1.1926569122660418, ... ], ... ],
    	[ [ 1.3553293868154885, 1.19060515995811, 1.8757270189047817, 0.08477158507151698, 1.252067314591337, 1.640330317497956, 1.6027061318451152, 1.748, ... ], [ 0.46711374212300766, 0.5192683276828802, 1.5580218177421339, 1.2638753490059254, 1.768, -0.2760137009184699, 1.9023184779444215, 1.80140854096036, ... ], [ 0.6944057489129571, 1.6061701478432777, 1.93459145903964, 1.550193826950347, 0.9964076102778925, 0.5834440596209638, 1.0723739529822238, -0.05055154628524283, ... ], [ 1.2292938681548846, -0.07555873242643914, 1.7558753490059253, 0.9623421570514907, 1.4438990281129946, 1.8534322200674294, 1.429499534658766, 0.9965933204045754, ... ], [ 1.62611281144054, 1.5098971667480592, 1.77270427048018, 1.6341638923845492, -0.9310301938843428, 1.299827990791787, 0.35432473340315024, 1.2414421982560284, ... ], [ 0.10520287445647854, 0.8041782646669418, 1.56423932379955, 1.3168426223927245, 1.22, 0.873303846343484, 1.2554340814323646, 1.976, ... ], [ -0.1705777581211701, 1.1996833834205138, 0.6673393650040877, 1.816, 1.3535095128473655, -1.0027534900592536, 1.2542648642715544, 1.026150191466079, ... ], [ 1.8630464275316707, 1.399784355307519, 0.5999526417858616, 1.5521009718870054, 1.4608363669339959, 1.866284820648753, 1.1652165753749484, 1.874763468247853, ... ], ... ],
    	[ [ 1.0830663839088692, 1.1270663839088693, 1.4311137421230076, 1.3306924309266455, -0.30117666262055126, -0.8137226248109883, 1.3858061730496531, 1.6170009306824675, ... ], [ 0.0742137833275455, 1.64846494668063, 1.8077270189047816, 1.4497288802697168, 1.0679526417858616, 0.7494858337402963, 1.7692165753749485, -0.24175628210665662, ... ], [ 1.0941065559818113, 0.5418597867225202, 0.681273911777686, -0.2532739117776861, 0.16916549443093948, 1.6122729810952185, 1.6629654120218638, -0.19264067861871365, ... ], [ 0.8268744183234578, 0.5076896388792425, 1.7527379277758484, 1.7573549272874929, 0.67505640572027, 0.9100654532264016, 1.9546824527380462, 1.7008526005813238, ... ], [ 1.8326942922915805, 1.2666687518195763, 0.5137824939425839, 1.8781028332519407, 1.4754876951052316, 1.805489556470167, 0.30605175230793175, 1.476330317497956, ... ], [ 1.900081015509807, 1.695161100337146, 1.208, 0.2926805913731108, 1.0813412263690232, 0.7182411851644852, 0.3667834246250516, 1.9154440596209639, ... ], [ 0.6100891323334708, 1.0569872297639977, 1.5793904459480967, 1.5841383519125445, 1.4571355598651414, 1.7712184367398836, -0.2792794958724919, -0.2899046122078004, ... ], [ 1.2306724745494468, 0.9364412675735608, 1.1598990281129946, 1.72635213524009, 1.2317369970933807, 1.2085796194861054, 1.1105204217184323, 1.3049099369840615, ... ], ... ],
    	[ [ 1.7281583082897432, 1.7133175472619537, 0.5530919243808737, 1.2251592389722108, 1.2026687518195764, 1.702150191466079, 0.8876833834205138, 1.5277943334961184, ... ], [ 0.8904431289384962, 1.9019208458551284, -0.11974071982325146, 1.9577388584583162, 0.9611592389722108, 1.327827990791787, 0.9431137421230077, -0.038618860876579786, ... ], [ 1.9349317547261953, 1.495932685408663, 1.9368862578769923, 1.7435113742123007, 1.6252676563189574, 1.8155923897221076, 1.8052165753749485, -0.03037581434715919, ... ], [ 1.2259981386350647, 1.8384431289384962, 0.4648825351471217, 1.8897388584583161, 1.4406469340774422, 1.9407852859899868, 0.9352246921986124, 1.6859345467735984, ... ], [ 1.2521346291826738, 1.7522530247180197, 1.6746487954423777, 1.736516027624639, 1.2506114154168384, 1.3486232549703732, 1.0252938681548847, 1.0754440596209638, ... ], [ 0.98142224187883, 0.22020194377401092, 1.6004549684920308, -0.08562790838271137, 0.8578398303453216, 1.1205659185676355, -0.010925499267466693, 1.3731355598651416, ... ], [ 1.87483078283919, 1.3283166165794862, 0.9220991105220702, 1.718500465341234, 1.1410208870596663, 1.3846070213230453, 1.2966032985931746, 1.7939781822578662, ... ], [ 1.6964549684920307, 1.9284886257876992, 1.020532261271967, 1.2136715438669792, 0.7591810567143447, 1.8931928962678792, 1.2630800848273391, 0.8433393650040878, ... ], ... ],
    	[ [ 0.8860891323334709, 1.0523402956865555, 1.066884396512057, 1.2141464687362085, 1.8493312481804236, 1.513954503150797, 1.549938269503469, 1.8233767450296268, ... ], [ 0.6794540378095631, 0.7129062142541909, 1.253685244785449, 1.683346810463829, 0.9688762796883931, 1.807467738728033, 1.4649099369840615, 1.5005496849203075, ... ], [ 0.03906266117899873, 1.7293312481804237, 0.931123720311607, -0.09696727338679917, 1.7986724745494467, 0.892532261271967, 1.1678990281129946, 1.225513235577236, ... ], [ 0.7908507392163885, 1.2153967014068254, 1.6442156446924807, 1.6122330683408213, 1.6108644401348584, 1.35553505331937, 1.8000037227298706, 1.3239900218114007, ... ], [ 1.5490682452738047, -0.17244126757356087, 1.1429753902104631, 1.6696515874897806, 1.4291430053248828, 1.7644886257876993, 0.11480338100225017, 1.9039663427043314, ... ], [ 1.0689572951981998, 1.1262137833275454, 1.535123720311607, 1.88988718855946, 0.38755500969656853, 1.422284820648753, -0.01893547745606597, 1.5256279083827113, ... ], [ 1.4328426223927246, 0.8743284561330208, 1.4485023267061692, -0.2652776345075567, 0.02220008240907559, 1.8412402544820174, -0.6996833834205138, 1.9640909936984061, ... ], [ 1.42871797139865, -0.4489572951981998, 1.61953505331937, 0.9362966602022876, 0.9644986039762986, 0.7696678211371086, 1.8348208046505907, -0.0443676975234952, ... ], ... ],
    	[ [ 1.194763468247853, 0.3321782646669417, 1.4170445661667352, 1.2605222830833678, 0.8528625787699231, 0.972, 1.5508781410533283, 1.8003303174979561, ... ], [ 1.3441146728054754, 1.635865370817326, 1.5873293868154885, 0.5706450727125071, 1.7664767862341646, 0.6013375036391525, 0.4234739941867617, 1.2764986039762984, ... ], [ 1.6240673145913371, 0.31917107852574533, 1.6752720504127507, 1.1646469340774424, 1.8001819873968123, 0.23104642753167076, 1.463932685408663, 0.7169062142541909, ... ], [ 1.2234640159981622, 0.5658261294268517, 1.9038180126031876, 1.7017862166724544, 1.9916023679107069, 1.7020454968492031, -0.25109750847567974, 0.9763502738751547, ... ], [ -0.3158017789558597, 1.0199900218114009, 0.8930782234624038, 1.6315450315079694, 0.22264879544237764, 1.7960336572956686, 1.3903857925357586, 0.36229107610748174, ... ], [ 1.3315687106150385, 1.6780691759562723, 1.6742611415416837, 1.4792184367398837, 1.7210009306824676, -0.06626486427155445, 1.1732365317521471, 1.474510443529833, ... ], [ 1.2613212699918244, 1.8890919243808737, 0.7496915002441779, 1.8777388584583161, 1.4571156034879431, 1.0367616068829177, 1.6499881604464652, 0.4506887081967748, ... ], [ 1.0042156446924808, 1.7477270189047815, 1.3457999175909243, 1.5810956471107445, -0.5565596631089067, 1.8541838487617475, 1.3376752665968499, 1.0826014372282393, ... ], ... ],
    	[ [ 1.3836460033949747, 1.924967273386799, 1.4931928962678793, 1.90377437711892, 0.25357682743870247, 0.4123365729566848, 1.8440909936984062, 1.875386723218226, ... ], [ 1.0244786475991, 1.006793402813651, 1.9297725157539847, 0.5828607174049878, 1.9134658773630977, 1.8448663014997937, 1.165806173049653, 1.6994141250551658, ... ], [ 1.184, 1.6291729398906807, 1.545513235577236, 1.2651928962678793, 1.6114876951052317, 1.1732975908847554, 0.44959052835717234, 1.938045496849203, ... ], [ 1.10988718855946, 1.684, 0.8743658361585599, 1.6087379277758487, 1.0951000412045377, 1.3952483713056816, 1.908, 1.3394640159981623, ... ], [ 1.2432284149284831, 0.8562019437740109, -0.471825458062929, 1.8234203805138947, 1.1850208870596664, 1.6583758143471592, 1.184838899662854, 1.6777151793512468, ... ], [ 0.196, 1.5233904459480967, 1.8890682452738046, 1.0191336985002064, 1.8036260470177763, 1.424, 1.9107734464364523, 1.8294658773630978, ... ], [ 1.6256378865713106, 1.0838043116847178, 1.2443166165794861, 0.3962692583653479, 0.3013611827462217, 0.8313767450296269, 1.1940218177421338, 1.8805796194861053, ... ], [ 1.8612639335890868, 1.646443128938496, 1.1796734052319144, 1.6491255816765422, 1.634059197767673, 1.360215644692481, 0.8922929374724171, 1.1401246509940748, ... ], ... ],
    	[ [ -0.21899281385880362, 1.6975705719799739, 0.2753019849785487, 0.4814048182304894, 1.8951037639344084, 1.38140854096036, 1.5131355598651415, 1.472320339309357, ... ], [ 1.286419449831427, 1.7179719267991373, 1.3127616068829178, 1.05481708192072, -0.11353691468430524, 1.3141464687362086, 1.7900492195790736, 1.912397632089293, ... ], [ -0.18980989577952373, 1.5952720504127509, 1.5171019025694732, 1.211918984490193, 0.7824294280200264, 0.16573885845831612, 1.2283502738751546, 1.1803639747936245, ... ], [ 1.5718753490059254, 1.4008563233111946, -0.12363974793624599, 1.2912383931170823, 0.4741165341704106, 1.8614658773630979, 1.736, 1.6128762796883929, ... ], [ 0.5493175472619539, 1.8043776757120944, 1.0291355598651415, 1.402935477456066, 1.3875687106150383, 1.2753057077084193, 1.8580255404720045, 1.6926706131845115, ... ], [ 1.9510464275316708, 1.7700791541448715, 1.5610445661667354, 1.815252094035552, 1.835798056225989, 1.4181028332519408, 1.0063758143471593, 1.6365596631089068, ... ], [ 1.0139881604464653, 0.9905341226369023, 1.8655232137658353, 1.8789317547261952, 1.6594440596209639, -0.3594477823508344, 1.6583421570514907, 0.852, ... ], [ 1.6879563645157323, 1.7413786063945622, 1.5671411439599474, 1.4741738705731482, 1.7686706131845116, 1.5301701478432777, 1.9330919243808737, 1.6069080756191263, ... ], ... ],
    	...
    ]

Conjugate Gradient Descent

First, we use a conjugate gradient descent method, which converges the fastest for purely quadratic objectives (i.e. linear gradient systems).

TrainingTester.java:452 executed in 91.38 seconds (5.397 gc):

    IterativeTrainer iterativeTrainer = new IterativeTrainer(trainable.addRef());
    try {
      iterativeTrainer.setLineSearchFactory(label -> new QuadraticSearch());
      iterativeTrainer.setOrientation(new GradientDescent());
      iterativeTrainer.setMonitor(TrainingTester.getMonitor(history));
      iterativeTrainer.setTimeout(30, TimeUnit.SECONDS);
      iterativeTrainer.setMaxIterations(250);
      iterativeTrainer.setTerminateThreshold(0);
      return iterativeTrainer.run();
    } finally {
      iterativeTrainer.freeRef();
    }
Logging
Reset training subject: 1338171857198
Reset training subject: 1339622560725
Constructing line search parameters: GD
F(0.0) = LineSearchPoint{point=PointSample{avg=0.8537718530625}, derivative=-2.668037040820336E-7}
F(1.0E-10) = LineSearchPoint{point=PointSample{avg=0.8537718530625}, derivative=-2.6680285798633045E-7}, evalInputDelta = 0.0
New Minimum: 0.8537718530625 > 0.8537718530624998
F(7.000000000000001E-10) = LineSearchPoint{point=PointSample{avg=0.8537718530624998}, derivative=-2.666357239691429E-7}, evalInputDelta = -2.220446049250313E-16
New Minimum: 0.8537718530624998 > 0.8537718530624987
F(4.900000000000001E-9) = LineSearchPoint{point=PointSample{avg=0.8537718530624987}, derivative=-2.6659177595625217E-7}, evalInputDelta = -1.3322676295501878E-15
New Minimum: 0.8537718530624987 > 0.8537718530624907
F(3.430000000000001E-8) = LineSearchPoint{point=PointSample{avg=0.8537718530624907}, derivative=-2.6659152363906383E-7}, evalInputDelta = -9.325873406851315E-15
New Minimum: 0.8537718530624907 > 0.8537718530624361
F(2.4010000000000004E-7) = LineSearchPoint{point=PointSample{avg=0.8537718530624361}, derivative=-2.665915225511653E-7}, evalInputDelta = -6.394884621840902E-14
New Minimum: 0.8537718530624361 > 0.8537718530620519
F(1.6807000000000003E-6) = LineSearchPoint{point=PointSample{avg=0.8537718530620519}, derivative=-2.6659152255110205E-7}, evalInputDelta = -4.480860127387132E-13
New Minimum: 0.8537718530620519 > 0.8537718530593634
F(1.1764900000000001E-5) = LineSearchPoint{point=PointSample{avg=0.8537718530593634}, derivative=-2.665915225506809E-7}, evalInputDelta = -3.1366020891709923E-12
New Minimum: 0.8537718530593634 > 0.8537718530405451
F(8.235430000000001E-5) = LineSearchPoint{point=PointSample{avg=0.8537718530405451}, derivative=-2.6659152254774124E-7}, evalInputDelta = -2.1954882356567396E-11
New Minimum: 0.8537718530405451 > 0.8537718529088153
F(5.764801000000001E-4) = LineSearchPoint{point=PointSample{avg=0.8537718529088153}, derivative=-2.6659152252715706E-7}, evalInputDelta = -1.5368473160748408E-10
New Minimum: 0.8537718529088153 > 0.8537718519867072
F(0.004035360700000001) = LineSearchPoint{point=PointSample{avg=0.8537718519867072}, derivative=-2.6659152238307824E-7}, evalInputDelta = -1.0757927881854812E-9
New Minimum: 0.8537718519867072 > 0.8537718455319494
F(0.028247524900000005) = LineSearchPoint{point=PointSample{avg=0.8537718455319494}, derivative=-2.665915213745245E-7}, evalInputDelta = -7.530550627521393E-9
New Minimum: 0.8537718455319494 > 0.8537718003486461
F(0.19773267430000002) = LineSearchPoint{point=PointSample{avg=0.8537718003486461}, derivative=-2.6659151431463525E-7}, evalInputDelta = -5.271385394856054E-8
New Minimum: 0.8537718003486461 > 0.8537714840655571
F(1.3841287201) = LineSearchPoint{point=PointSample{avg=0.8537714840655571}, derivative=-2.665914648953919E-7}, evalInputDelta = -3.689969428899431E-7
New Minimum: 0.8537714840655571 > 0.8537692700855748
F(9.688901040700001) = LineSearchPoint{point=PointSample{avg=0.8537692700855748}, derivative=-2.665911189607236E-7}, evalInputDelta = -2.5829769252361245E-6
New Minimum: 0.8537692700855748 > 0.8537537723061416
F(67.8223072849) = LineSearchPoint{point=PointSample{avg=0.8537537723061416}, derivative=-2.6658869741802E-7}, evalInputDelta = -1.8080756358451744E-5
New Minimum: 0.8537537723061416 > 0.8536452917917388
F(474.7561509943) = LineSearchPoint{point=PointSample{avg=0.8536452917917388}, derivative=-2.665717466191199E-7}, evalInputDelta = -1.2656127076127177E-4
New Minimum: 0.8536452917917388 > 0.8528861213308232
F(3323.2930569601003) = LineSearchPoint{point=PointSample{avg=0.8528861213308232}, derivative=-2.6645309102680637E-7}, evalInputDelta = -8.85731731676831E-4
New Minimum: 0.8528861213308232 > 0.8475842010392235
F(23263.0513987207) = LineSearchPoint{point=PointSample{avg=0.8475842010392235}, derivative=-2.6523335938248494E-7}, evalInputDelta = -0.006187652023276535
New Minimum: 0.8475842010392235 > 0.8112219689172908
F(162841.3597910449) = LineSearchPoint{point=PointSample{avg=0.8112219689172908}, derivative=-2.5583995816139784E-7}, evalInputDelta = -0.042549884145209216
New Minimum: 0.8112219689172908 > 0.5911450639588095
F(1139889.5185373144) = LineSearchPoint{point=PointSample{avg=0.5911450639588095}, derivative=-1.9639058231679167E-7}, evalInputDelta = -0.26262678910369053
New Minimum: 0.5911450639588095 > 0.1730165487397643
F(7979226.6297612) = LineSearchPoint{point=PointSample{avg=0.1730165487397643}, derivative=3.836548969300879E-8}, evalInputDelta = -0.6807553043227357
0.1730165487397643 <= 0.8537718530625
New Minimum: 0.1730165487397643 > 0.1463671754542266
F(6976088.228943626) = LineSearchPoint{point=PointSample{avg=0.1463671754542266}, derivative=1.4509388915618898E-8}, evalInputDelta = -0.7074046776082734
Right bracket at 6976088.228943626
New Minimum: 0.1463671754542266 > 0.1427543550311774
F(6616279.959286378) = LineSearchPoint{point=PointSample{avg=0.1427543550311774}, derivative=5.529465380429502E-9}, evalInputDelta = -0.7110174980313226
Right bracket at 6616279.959286378
New Minimum: 0.1427543550311774 > 0.14224096907478184
F(6481942.702262877) = LineSearchPoint{point=PointSample{avg=0.14224096907478184}, derivative=2.107301532596665E-9}, evalInputDelta = -0.7115308839877181
Right bracket at 6481942.702262877
Converged to right
Fitness changed from 0.8537718530625 to 0.14224096907478184
Iteration 1 complete. Error: 0.14224096907478184 Total: 91.3829; Orientation: 2.6421; Line Search: 84.2824
Final threshold in iteration 1: 0.14224096907478184 (> 0.0) after 91.384s (< 30.000s)

Returns

    0.14224096907478184

This training run resulted in the following configuration:

TrainingTester.java:610 executed in 0.00 seconds (0.000 gc):

    RefList<double[]> state = network.state();
    assert state != null;
    String description = state.stream().map(RefArrays::toString).reduce((a, b) -> a + "\n" + b)
        .orElse("");
    state.freeRef();
    return description;

Returns

    

And regressed input:

TrainingTester.java:622 executed in 0.01 seconds (0.000 gc):

    return RefArrays.stream(RefUtil.addRef(data)).flatMap(x -> {
      return RefArrays.stream(x);
    }).limit(1).map(x -> {
      String temp_18_0015 = x.prettyPrint();
      x.freeRef();
      return temp_18_0015;
    }).reduce((a, b) -> a + "\n" + b).orElse("");

Returns

    [
    	[ [ 1.612, -0.136, -1.268, -0.688, -1.768, 1.3484662209649134, 0.8048918142702819, -0.324, ... ], [ -1.116, 0.304, 1.06, -1.076, 0.652, -0.4, 0.832, 1.4626684310882283, ... ], [ -0.644, 1.938728513348579, 1.196, 0.868, -1.96, 1.448, -0.932, -1.232, ... ], [ 0.9244150067759992, -0.804, 1.4273395088087988, 1.9654181174955463, -1.288, 1.1010401635723404, -0.5834260573925732, -1.368, ... ], [ 0.9177977898766853, 1.3279540790757705, 1.86, -1.496, 0.32, 0.9275902864886857, -0.108, 0.5188891676379397, ... ], [ -0.44, 1.296, -0.752, -1.548, 0.008, -0.096, 0.732, 0.552, ... ], [ 1.0, 1.2165986904729174, 1.136, -0.18, 0.88, 1.2851726330803441, 0.548, -0.824, ... ], [ -1.196, -0.86, 1.272, -1.532, -0.644, -0.36, -0.204, 0.8434313506572579, ... ], ... ],
    	[ [ 1.4632882946198844, 0.136, 0.8275037379049113, 0.332, 0.5293986628947405, -0.596, -1.408, -0.928, ... ], [ -0.104, 1.1262428377828597, 1.496, 1.4089324419298266, 1.14, 1.152, 1.38, -1.016, ... ], [ -0.168, -0.74, -0.168, 0.372, 1.2658331242715453, 1.952, -0.176, 1.152, ... ], [ 0.74, -1.816, 1.428, 0.328, -0.768, 0.856, -1.168, 1.4571779263450289, ... ], [ 0.584, -1.456, 1.4304176534083415, -0.792, 1.304, 0.136, -0.492, -0.208, ... ], [ 0.532, -1.48, 0.976, 1.5233642566742895, 0.6152017460361103, 0.144, -0.08362032761886096, -0.004, ... ], [ -0.708, -1.74, 0.028, -1.652, -0.968, -1.604, 0.52, 0.396, ... ], [ 1.0062587175769138, -1.4, 1.6103099317658283, 1.5157280492613743, 1.9554719783168029, 0.18, 1.769684774969487, -1.784, ... ], ... ],
    	[ [ 1.056, 0.21801852642639652, 0.1, 0.46953113240274424, -1.62, -0.872, 1.0683178716628552, 1.748, ... ], [ -0.812, 0.332, 1.961229140533943, -0.808, 0.964, 0.852, -0.236, 0.09392668457793762, ... ], [ -1.128, 2.001305102588348, 1.8644044202466297, -0.852, 1.768, -0.492, 1.38, 1.9866631378235435, ... ], [ 0.431487858110857, -1.172, -1.2, -1.468, -0.36, -1.668, 1.8025660027103998, -1.444, ... ], [ 1.8201836836969183, 1.32, 1.7493368621764565, -0.416, -0.612, 0.28774922232011324, -0.384, -0.692, ... ], [ 0.228, 1.7094852114785146, -0.972, 1.332875934476228, 0.884, 0.04, 1.4250702047025154, -0.96, ... ], [ -1.832, 0.5692697681934881, 1.638309931765828, 0.7, 1.44, 1.7180538608212566, 0.86, 0.18432316492753997, ... ], [ 1.0, -0.448, 0.712, -1.252, 1.0057006547635412, 1.436, 1.5862534243122293, 0.672, ... ], ... ],
    	[ [ -0.42, -1.52, 1.48, -1.26, 1.5441995634909724, 1.380261364209256, -1.8, 1.592, ... ], [ 1.4087682128337145, 0.892, 0.672, -1.824, 0.216, 0.272, -0.444, 1.736, ... ], [ -2.0, -1.04, -0.488, -1.92, -1.34, 0.51131833575006, -1.78, 1.268, ... ], [ 0.012, -0.152, 0.26, 1.146309931765828, -0.46303579848206455, -1.352, -0.476, -1.808, ... ], [ -0.204, 0.312, -1.968, -1.14, -1.584, -0.66, -1.212, 2.0014993728146355, ... ], [ 0.12, 1.404, -1.584, -0.272, 1.4951699864480017, 0.484, -1.628, -0.74, ... ], [ 1.7636821283371447, -1.812, 1.396, 0.5419567257081128, 0.648, -0.82, -0.004, -1.292, ... ], [ 1.044, -0.756, -0.992, 0.544, -1.696, 1.836, 0.016, 0.196, ... ], ... ],
    	[ [ -1.076, -1.564, 1.395487858110857, 0.744, -1.616, -1.144, -0.524, -1.288, ... ], [ 0.588, 0.212, -0.12, 0.208, -1.424, -1.848, 1.6031240655237724, 1.303493151375542, ... ], [ 0.7902940519717739, 0.26, -0.972, -0.116, 0.068, 1.693571760062289, 1.5447735060983994, -1.164, ... ], [ -1.316, 0.428, -0.124, -0.656, 1.7092185540045737, -1.364, 1.056, 1.8744212282150934, ... ], [ -0.34874174651029066, 1.838641964764805, 0.652, -0.372, -1.552, 0.272, -0.072, -0.848, ... ], [ -0.896, -0.132, -0.128, -0.708, -1.12, -1.62, 1.830407066878972, -1.732, ... ], [ -0.552, -1.644, 0.448, 0.8631452385825111, 0.8139973533676577, -0.708, 0.304, 0.68, ... ], [ 1.0228167803902863, 1.70115675328629, 1.128, -1.092, -0.844, -0.464, 1.62, 0.452, ... ], ... ],
    	[ [ -0.728, 1.7327064121154305, 1.232, -0.38, -1.348, 1.448521009960579, 1.332, -0.548, ... ], [ 1.6887064121154305, -0.388, 0.944, 1.5836362074129153, 1.7900627288926927, -1.264, -0.86, -0.336, ... ], [ -1.96, 0.952, 0.112, -1.864, -1.892, 0.536, -0.844, -1.864, ... ], [ -1.576, 1.9726604911912011, 1.893229140533943, 0.9082154432850267, 0.468, -0.836, 0.092, -0.936, ... ], [ 0.288, -1.108, 0.2822640108415986, 0.1457359891584014, -0.504, 1.526770859466057, -0.508, -1.3, ... ], [ 0.212, -1.828, -0.216, -1.432, 1.840019454600806, -0.592, -0.084, 1.2608953890770338, ... ], [ -0.508, 0.208, 1.991431350657258, 0.448, -1.216, -1.564, 1.740927148665142, -1.132, ... ], [ -0.52, -0.472, -1.632, -0.268, -1.216, 1.829213260739889, -0.256, 1.5904123601436568, ... ], ... ],
    	[ [ -0.528, -0.16, 0.424, -0.904, 0.948, 0.8720670939829684, -0.096, 1.9054287040249154, ... ], [ -1.912, -1.336, -0.536, 1.056, 1.776245484415202, 1.152, -1.172, 0.348, ... ], [ 0.06, -1.224, 0.5820891952161166, 1.8829245020327996, 0.9112830013551999, 0.06, 0.9074878581108571, -0.824, ... ], [ 0.9082560709445715, -0.92, -1.812, -0.72, 0.252, -1.228, 0.772, -0.552, ... ], [ -1.496, 0.204, 0.708, 1.252, -0.4944724424040077, -0.588, -1.308, -0.796, ... ], [ -0.376, 0.876, 1.928, 0.404, 0.948, 1.3168300135519984, 1.5807735060983994, -1.204, ... ], [ -1.532, -1.216, -0.4, 0.064, -1.528, 1.5382127966526844, -1.852, 0.264, ... ], [ -1.072, 1.8562560709445717, -1.12, 1.144, 0.06, -1.076, 0.196, 0.7667867392601111, ... ], ... ],
    	[ [ -0.168, 1.9270728513348578, -1.684, 1.1752830013551998, -1.104, -0.616, 0.3604203000406838, -0.248, ... ], [ 1.991789849979658, 1.22, 0.21108343786422734, 1.0, -1.48, 0.68, 0.784, 1.46, ... ], [ 0.1, 1.608, -0.78, -0.14, -1.684, -1.132, -1.248, -1.572, ... ], [ -0.616, 1.770105075010171, -1.108, -0.636, -1.724, 1.851528485770402, -1.524, 1.721299809323663, ... ], [ 0.656, 1.2038004365090273, -1.112, 1.7399487858110856, -1.464, 0.724, 1.044, -0.644, ... ], [ 1.9563072851334857, -1.512, 1.972512141889143, -1.192, 0.152, -1.868, 0.768, 0.24394349254640096, ... ], [ 1.8530136972489166, -1.856, 1.7545395363869758, 1.8149245020327998, -0.844, 0.936, -1.004, -1.608, ... ], [ 0.828, 1.7, 0.1, 0.576, 1.7078410641685724, -1.168, 1.8044962620950886, -0.068, ... ], ... ],
    	...
    ]

To produce the following output:

TrainingTester.java:633 executed in 0.10 seconds (0.000 gc):

    Result[] array = ConstantResult.batchResultArray(pop(RefUtil.addRef(data)));
    @Nullable
    Result eval = layer.eval(array);
    assert eval != null;
    TensorList tensorList = Result.getData(eval);
    String temp_18_0016 = tensorList.stream().limit(1).map(x -> {
      String temp_18_0017 = x.prettyPrint();
      x.freeRef();
      return temp_18_0017;
    }).reduce((a, b) -> a + "\n" + b).orElse("");
    tensorList.freeRef();
    return temp_18_0016;

Returns

    [
    	[ [ 1.612, 1.1262428377828597, 1.496, 1.4089324419298266, 1.14, 1.3484662209649134, 1.38, 1.4626684310882283, ... ], [ 0.9244150067759992, 1.938728513348579, 1.428, 1.9654181174955463, 1.2658331242715453, 1.952, -0.176, 1.4571779263450289, ... ], [ 0.9177977898766853, 1.3279540790757705, 1.86, 1.5233642566742895, 1.304, 0.9275902864886857, 0.732, 0.552, ... ], [ 1.0062587175769138, 1.2165986904729174, 1.6103099317658283, 1.5157280492613743, 1.9554719783168029, 1.2851726330803441, 1.769684774969487, 0.8434313506572579, ... ], [ 1.6816441473099422, 1.7883584993224002, 1.4798357709038874, 1.7214799182138298, 1.279226493901601, 1.1090913777612545, 1.6203019918688013, 0.7206816642499401, ... ], [ 1.8965121418891429, 1.9622022101233147, 1.453439290554285, 1.925684774969487, 1.4034366439219426, 1.252, 1.743113478994403, 1.216, ... ], [ 0.9027355250711966, 1.348, 1.955819891109834, 1.3990781445995426, 0.6431452385825112, 0.428, 0.688, 1.588, ... ], [ 1.351908158151541, 1.137172633080344, 1.785382783100686, 1.4437951432443428, 1.016, 0.9460591540859414, 0.104, 1.9265501229163453, ... ], ... ],
    	[ [ 1.4087682128337145, 0.892, 1.961229140533943, 0.46953113240274424, 1.5441995634909724, 1.380261364209256, 1.0683178716628552, 1.748, ... ], [ 0.431487858110857, 2.001305102588348, 1.8644044202466297, 1.146309931765828, 1.768, 0.51131833575006, 1.8025660027103998, 1.9866631378235435, ... ], [ 1.8201836836969183, 1.7094852114785146, 1.7493368621764565, 1.332875934476228, 1.4951699864480017, 0.484, 1.4250702047025154, 2.0014993728146355, ... ], [ 1.7636821283371447, 0.5692697681934881, 1.638309931765828, 0.7, 1.44, 1.836, 1.5862534243122293, 0.672, ... ], [ 1.9039947067353151, 1.6987143520124577, 1.8653315689117718, 1.48, 1.5841572173734944, 1.823528485770402, 1.9753024559560055, 1.5727629195690298, ... ], [ 0.6680918418484592, 2.001207967475204, 1.3326710777205708, 1.316, 1.22, 1.874391187084918, 0.49304016357234026, 1.976, ... ], [ -0.292, 0.6724203000406839, 1.1874772715814874, 1.816, 1.976962483060002, 0.1729006823417183, 0.0750481034693673, 0.1960670939829684, ... ], [ 1.5352882946198845, 1.3107196452771426, 1.2412185540045737, 1.9902993452364588, 1.164, 1.6204662209649134, 0.9407735060983992, 1.1658083764060545, ... ], ... ],
    	[ [ 1.6887064121154305, 1.7327064121154305, 1.395487858110857, 1.5836362074129153, 1.7900627288926927, 1.448521009960579, 1.6031240655237724, 1.303493151375542, ... ], [ 0.7902940519717739, 1.9726604911912011, 1.893229140533943, 0.9082154432850267, 1.7092185540045737, 1.693571760062289, 1.5447735060983994, 1.8744212282150934, ... ], [ 0.288, 1.838641964764805, 0.652, 0.1457359891584014, 1.840019454600806, 1.526770859466057, 1.830407066878972, 1.2608953890770338, ... ], [ 1.0228167803902863, 1.70115675328629, 1.991431350657258, 0.8631452385825111, 0.8139973533676577, 1.829213260739889, 1.740927148665142, 1.5904123601436568, ... ], [ 1.4586225101639987, 1.840245484415202, 1.051733342526059, 1.6892856479875422, 1.7925580628133726, 1.592, 2.0125315964899486, 1.36, ... ], [ 1.488, 1.0182693041062834, 1.208, 0.7059408459140585, 0.974463574332571, 0.528, 0.5912264939016008, 1.6197492223201133, ... ], [ 1.2086039837376021, 1.5308114871256016, 1.188, 1.192, 1.7278922783574866, 0.9197598088494827, 2.0007770809051513, 2.029340436983208, ... ], [ 0.5502180899173692, 1.5812697681934882, 1.124, 1.772665784455886, 1.7839381992817163, 1.1836415006775998, 1.8871646931833168, 1.0199028648868562, ... ], ... ],
    	[ [ 1.991789849979658, 1.9270728513348578, 0.424, 1.1752830013551998, 1.776245484415202, 1.152, 0.784, 1.9054287040249154, ... ], [ 0.9082560709445715, 1.770105075010171, 0.5820891952161166, 1.8829245020327996, 0.9112830013551999, 1.851528485770402, 0.9074878581108571, 1.721299809323663, ... ], [ 1.9563072851334857, 1.2038004365090273, 1.972512141889143, 1.7399487858110856, 0.948, 1.3168300135519984, 1.5807735060983994, 0.24394349254640096, ... ], [ 1.8530136972489166, 1.8562560709445717, 1.7545395363869758, 1.8149245020327998, 1.7078410641685724, 1.5382127966526844, 1.8044962620950886, 0.7667867392601111, ... ], [ 1.836399126981945, 0.7333527419705106, 1.314827366919656, 1.048, 1.9987549796720026, 1.9364503411708591, 1.559682128337145, 0.7797492223201132, ... ], [ 0.616, 1.0965986904729175, 1.457951432443428, 0.8940839019514322, 1.2212238472692585, 1.8829598364276598, 1.6884291681121202, 1.6438922783574865, ... ], [ 1.4580079398970271, 1.8555796999593162, 1.987313042485375, 1.5617465756877709, 1.760911268871088, -0.21499517082251973, 0.951032223675313, 1.4875955797533704, ... ], [ 1.553951432443428, 1.9320512141889143, 1.6368600546821734, 0.8467249385418273, 1.0156874216018295, 1.989382783100686, 1.081388076365371, 1.3634772715814876, ... ], ... ],
    	[ [ 1.484603983737602, 1.2589704229570293, 1.7295258391380595, 1.6380944884808013, 1.296, 1.5282048567556572, 0.628, 1.2355496588291408, ... ], [ 0.8504582810678863, 1.6819302593846892, 1.092, 1.608, 0.5378030831413698, 1.672, 1.1799028648868561, 1.396, ... ], [ 1.898733806613264, 1.155754515584798, 1.36219691685863, 0.36260663036994445, 1.118218089917369, 1.5088600546821738, 1.06, 0.788, ... ], [ 1.3074260573925733, 1.560967776324687, 1.7332803547228575, 0.828, 1.3401077216425135, 1.0313395088087989, 0.545972605502167, 0.8572909412522269, ... ], [ 1.5276927148665143, -0.788, 1.777116125626745, 0.576, -0.8081625106381793, 1.7680512141889144, 1.272644611397147, 1.7579002182545138, ... ], [ 0.22, 1.8422940519717739, 1.9661969168586302, 1.612005293264685, 0.996757626304345, 1.1764662209649133, 1.2137201093643473, 1.268, ... ], [ 0.8557033013958837, 1.2412750614581727, 0.6647328784388543, 1.3877633836562344, 1.525612387721834, 1.672, -0.17242030004068387, 1.9355902864886856, ... ], [ 1.056, 0.4773156891177176, 1.295339508808799, 0.636, 1.4347602729366873, 1.6567523330396603, 1.552, 1.3236662485430906, ... ], ... ],
    	[ [ 0.48580837640605457, 1.5292079674752042, 1.716301991868801, 1.4101509959344005, 1.2091214188914297, 0.972, 1.516, 1.5402613642092562, ... ], [ 1.164, 1.0516008730180553, 1.6407682128337144, 1.4648547614174887, 1.9303558526900577, 1.748490968830404, 1.5278763985634325, 1.7467602729366873, ... ], [ 1.9161995634909723, 0.10897836285405638, 1.903277708090515, 1.4318410641685724, 1.7431805729773713, -0.09671170538011542, 1.1718004365090273, 1.6859302593846892, ... ], [ 1.8611673398156596, 1.7165421830193184, 1.9608194270226287, 1.036, 1.9595390722997712, 1.6877951432443428, 1.971957653882522, 1.6496794817048022, ... ], [ 1.81462608497075, 0.78, 1.3384017736142875, 1.774048567556572, -0.13717263308034416, 1.9420997817454861, 1.5827655662013722, 1.8372026742105192, ... ], [ 1.153439290554285, 1.624, 1.7490754979672003, 1.228, 1.4074931513755418, 1.1129518965306329, 1.8821916235939455, 1.784455634435544, ... ], [ 0.22104545683702503, 1.5470834378642273, 1.3161430560373732, 1.8029245020327997, 1.044, 0.9548220736549711, 1.8103046385011434, 1.957663601910748, ... ], [ 1.0932803547228576, 1.8332291405339431, 0.68, 1.104, 0.4017766168179464, 1.1701668757284547, 0.096, 1.3640459209242297, ... ], ... ],
    	[ [ 1.9643479127930306, 1.4653933696300556, 1.5893827831006861, 1.6, 1.1691620465509747, 1.8729978174548623, 1.8155902864886857, 1.754258717576914, ... ], [ 0.5613421554411412, 1.6979355526493736, 2.001024283778286, 1.5661351161403465, 1.924153642566743, 0.9470940243935968, 1.3831240655237722, 0.308, ... ], [ 1.184, 0.79196466560514, 1.54, 1.3613827831006862, 1.9285580628133725, 1.168, 0.5778437108009147, 1.9237951432443428, ... ], [ 0.9, 1.684, 0.13334744870582604, 1.8474313506572582, 1.846806193860917, 1.943886985092802, 1.908, 1.9771673398156593, ... ], [ 0.8584688675972558, 1.7325986904729174, 1.9792353619730367, 1.8483584993224003, 1.804911268871088, 1.536, 1.8617306958937168, 1.9235337790350866, ... ], [ 0.196, 0.22, 1.8676927148665143, 1.916905975606403, 1.764, 1.424, 1.6685174351538277, 1.840153642566743, ... ], [ 1.1126251567963412, 1.9281377627726886, 1.771579699959316, 1.56479825396389, 1.1278816918281174, 0.772, 1.5004044202466296, 1.8556415006776, ... ], [ 0.9955549520938254, 1.6642560709445715, 0.808, 1.4531832196097136, 0.832476807494283, 1.4492803547228574, 1.740188976961603, 1.2576900682341718, ... ], ... ],
    	[ [ 1.1882296046211478, 0.8924255933053687, 1.9034048843338343, 1.9206905323213765, 1.836, 1.5666631378235436, 1.7838922783574866, 0.7455523054614831, ... ], [ 1.6248653479468584, 1.464, 1.230822073654971, 1.425326275647087, 0.8376741884401175, 1.7380944884808014, 1.4, 1.9444609277002287, ... ], [ 0.8469033289740606, 1.8232777080905151, 1.6417924966120003, 1.7071187722590873, 1.5875744066946313, 0.148, 1.9016794817048024, 1.0663611459547426, ... ], [ 1.4543099317658281, 0.03638496564582372, 1.0163885404525757, 1.373177926345029, -0.468, 1.872153642566743, 1.736, 1.1818030831413697, ... ], [ 0.7630728513348579, 0.9030428102046824, 1.2998922783574867, 1.076, 1.209439290554285, 1.6493774898360012, 1.292, 1.6392317871662854, ... ], [ 1.6232882946198846, 1.901894924989829, 1.860301991868801, 1.304, 1.628, 1.2292856479875423, 0.888, 1.428, ... ], [ 1.1743046385011433, 0.9798463574332571, 1.7016441473099422, 1.9003072851334857, 1.3637492223201133, 1.1902781721777194, 1.2379567257081128, 0.852, ... ], [ 1.236, 1.116, 1.1, 0.44, 1.7152317871662854, 1.6334852114785146, 1.5910834378642273, 1.9489165621357727, ... ], ... ],
    	...
    ]

Limited-Memory BFGS

Next, we apply the same optimization using L-BFGS, a quasi-Newton method that is nearly ideal for smooth, approximately quadratic objectives.

TrainingTester.java:509 executed in 52.29 seconds (5.975 gc):

    IterativeTrainer iterativeTrainer = new IterativeTrainer(trainable.addRef());
    try {
      iterativeTrainer.setLineSearchFactory(label -> new ArmijoWolfeSearch());
      iterativeTrainer.setOrientation(new LBFGS());
      iterativeTrainer.setMonitor(TrainingTester.getMonitor(history));
      iterativeTrainer.setTimeout(30, TimeUnit.SECONDS);
      iterativeTrainer.setIterationsPerSample(100);
      iterativeTrainer.setMaxIterations(250);
      iterativeTrainer.setTerminateThreshold(0);
      return iterativeTrainer.run();
    } finally {
      iterativeTrainer.freeRef();
    }
Logging
Reset training subject: 1430021623093
Reset training subject: 1431594502363
Adding measurement 654fb631 to history. Total: 0
LBFGS Accumulation History: 1 points
Constructing line search parameters: GD
Non-optimal measurement 0.8537718530625 < 0.8537718530625. Total: 1
th(0)=0.8537718530625;dx=-2.668037040820336E-7
Adding measurement 597ffeb0 to history. Total: 1
New Minimum: 0.8537718530625 > 0.8537712787085724
WOLFE (weak): th(2.154434690031884)=0.8537712787085724; dx=-2.66591432808354E-7 evalInputDelta=5.743539276403808E-7
Adding measurement 1b9e0abd to history. Total: 2
New Minimum: 0.8537712787085724 > 0.8537707043548384
WOLFE (weak): th(4.308869380063768)=0.8537707043548384; dx=-2.6659134306553934E-7 evalInputDelta=1.1487076616578662E-6
Adding measurement 23000db2 to history. Total: 3
New Minimum: 0.8537707043548384 > 0.8537684069418348
WOLFE (weak): th(12.926608140191302)=0.8537684069418348; dx=-2.665909840942734E-7 evalInputDelta=3.446120665273611E-6
Adding measurement 2fe7bbb1 to history. Total: 4
New Minimum: 0.8537684069418348 > 0.8537580686216014
WOLFE (weak): th(51.70643256076521)=0.8537580686216014; dx=-2.66589368723579E-7 evalInputDelta=1.3784440898612083E-5
Adding measurement 2c2a38e3 to history. Total: 5
New Minimum: 0.8537580686216014 > 0.853702931971674
WOLFE (weak): th(258.53216280382605)=0.853702931971674; dx=-2.6658075341321716E-7 evalInputDelta=6.892109082601294E-5
Adding measurement 50d422c2 to history. Total: 6
New Minimum: 0.853702931971674 > 0.8533583682800717
WOLFE (weak): th(1551.1929768229563)=0.8533583682800717; dx=-2.665269077234429E-7 evalInputDelta=4.1348478242830833E-4
Adding measurement 18b04fd1 to history. Total: 7
New Minimum: 0.8533583682800717 > 0.8508796239671763
WOLFE (weak): th(10858.350837760694)=0.8508796239671763; dx=-2.6609038724353293E-7 evalInputDelta=0.0028922290953237173
Adding measurement 5250ea0f to history. Total: 8
New Minimum: 0.8508796239671763 > 0.8308515636862905
WOLFE (weak): th(86866.80670208555)=0.8308515636862905; dx=-2.609236484490494E-7 evalInputDelta=0.02292028937620949
Adding measurement 100581cb to history. Total: 9
New Minimum: 0.8308515636862905 > 0.6651152533297944
END: th(781801.26031877)=0.6651152533297944; dx=-2.1697977546069376E-7 evalInputDelta=0.18865659973270565
Fitness changed from 0.8537718530625 to 0.6651152533297944
Iteration 1 complete. Error: 0.6651152533297944 Total: 52.2870; Orientation: 2.4327; Line Search: 45.3220
Final threshold in iteration 1: 0.6651152533297944 (> 0.0) after 52.288s (< 30.000s)

Returns

    0.6651152533297944

This training run resulted in the following configuration:

TrainingTester.java:610 executed in 0.00 seconds (0.000 gc):

    RefList<double[]> state = network.state();
    assert state != null;
    String description = state.stream().map(RefArrays::toString).reduce((a, b) -> a + "\n" + b)
        .orElse("");
    state.freeRef();
    return description;

Returns

    

And regressed input:

TrainingTester.java:622 executed in 0.01 seconds (0.000 gc):

    return RefArrays.stream(RefUtil.addRef(data)).flatMap(x -> {
      return RefArrays.stream(x);
    }).limit(1).map(x -> {
      String temp_18_0015 = x.prettyPrint();
      x.freeRef();
      return temp_18_0015;
    }).reduce((a, b) -> a + "\n" + b).orElse("");

Returns

    [
    	[ [ 1.612, -0.136, -1.268, -0.688, -1.768, 1.594284820648753, 1.8558553926287267, -0.324, ... ], [ -1.116, 0.304, 1.06, -1.076, 0.652, -0.4, 0.832, 1.55529572951982, ... ], [ -0.644, -0.19882452738046125, 1.196, 0.868, -1.96, 1.448, -0.932, -1.232, ... ], [ 1.1737961948610538, -0.804, 1.7515350533193699, 1.0320199563771986, -1.288, 1.8634340814323644, -0.06685073921638852, -1.368, ... ], [ 1.070989091128933, 1.6093985627717606, 1.86, -1.496, 0.32, 0.9560909936984061, -0.108, 1.4309117983489967, ... ], [ -0.44, 1.296, -0.752, -1.548, 0.008, -0.096, 0.732, 0.552, ... ], [ 1.0, 0.3402019437740109, 1.136, -0.18, 0.88, 0.9253512045576223, 0.548, -0.824, ... ], [ -1.196, -0.86, 1.272, -1.532, -0.644, -0.36, -0.204, 0.6047379277758484, ... ], ... ],
    	[ [ 1.7910464275316709, 0.136, 1.6967753078013876, 0.332, 1.2668544619462592, -0.596, -1.408, -0.928, ... ], [ -0.104, 0.4137251575398463, 1.496, 1.9005696412975062, 1.14, 1.152, 1.38, -1.016, ... ], [ -0.168, -0.74, -0.168, 0.372, 0.5818161512382524, 1.952, -0.176, 1.152, ... ], [ 0.74, -1.816, 1.428, 0.328, -0.768, 0.856, -1.168, 1.3752383931170824, ... ], [ 0.584, -1.456, 1.818739789140784, -0.792, 1.304, 0.136, -0.492, -0.208, ... ], [ 0.532, -1.48, 0.976, 0.45458773630976934, 1.7837307416346522, 0.144, 1.0029691347517344, -0.004, ... ], [ -0.708, -1.74, 0.028, -1.652, -0.968, -1.604, 0.52, 0.396, ... ], [ 1.127386723218226, -1.4, 1.7278753490059253, 0.699895305383124, 1.157452176444628, 0.18, 1.3742374624346148, -1.784, ... ], ... ],
    	[ [ 1.056, 1.19060515995811, 0.1, 0.08477158507151698, -1.62, -0.872, 1.6027061318451152, 1.748, ... ], [ -0.812, 0.332, 1.8757270189047817, -0.808, 0.964, 0.852, -0.236, 1.6294022855016315, ... ], [ -1.128, 0.5192683276828802, 1.5580218177421339, -0.852, 1.768, -0.492, 1.38, 1.80140854096036, ... ], [ 0.46711374212300766, -1.172, -1.2, -1.468, -0.36, -1.668, 1.9023184779444215, -1.444, ... ], [ 0.6944057489129571, 1.32, 1.93459145903964, -0.416, -0.612, 0.5834440596209638, -0.384, -0.692, ... ], [ 0.228, 1.6061701478432777, -0.972, 1.550193826950347, 0.884, 0.04, 1.0723739529822238, -0.96, ... ], [ -1.832, -0.07555873242643914, 1.7558753490059253, 0.7, 1.44, 1.8534322200674294, 0.86, 0.9965933204045754, ... ], [ 1.0, -0.448, 0.712, -1.252, 1.4438990281129946, 1.436, 1.429499534658766, 0.672, ... ], ... ],
    	[ [ -0.42, -1.52, 1.48, -1.26, 1.252067314591337, 1.640330317497956, -1.8, 1.592, ... ], [ 1.3553293868154885, 0.892, 0.672, -1.824, 0.216, 0.272, -0.444, 1.736, ... ], [ -2.0, -1.04, -0.488, -1.92, -1.34, -0.2760137009184699, -1.78, 1.268, ... ], [ 0.012, -0.152, 0.26, 1.2638753490059254, 1.6958927726542659, -1.352, -0.476, -1.808, ... ], [ -0.204, 0.312, -1.968, -1.14, -1.584, -0.66, -1.212, -0.05055154628524283, ... ], [ 0.12, 1.404, -1.584, -0.272, 0.9964076102778925, 0.484, -1.628, -0.74, ... ], [ 1.2292938681548846, -1.812, 1.396, 0.9623421570514907, 0.648, -0.82, -0.004, -1.292, ... ], [ 1.044, -0.756, -0.992, 0.544, -1.696, 1.836, 0.016, 0.196, ... ], ... ],
    	[ [ -1.076, -1.564, 1.4311137421230076, 0.744, -1.616, -1.144, -0.524, -1.288, ... ], [ 0.588, 0.212, -0.12, 0.208, -1.424, -1.848, 1.3858061730496531, 1.6170009306824675, ... ], [ 0.0742137833275455, 0.26, -0.972, -0.116, 0.068, 0.7494858337402963, 1.7692165753749485, -1.164, ... ], [ -1.316, 0.428, -0.124, -0.656, 1.0679526417858616, -1.364, 1.056, -0.24175628210665662, ... ], [ 1.0941065559818113, 0.5418597867225202, 0.652, -0.372, -1.552, 0.272, -0.072, -0.848, ... ], [ -0.896, -0.132, -0.128, -0.708, -1.12, -1.62, 1.6629654120218638, -1.732, ... ], [ -0.552, -1.644, 0.448, 1.7573549272874929, 0.67505640572027, -0.708, 0.304, 0.68, ... ], [ 0.8268744183234578, 0.5076896388792425, 1.128, -1.092, -0.844, -0.464, 1.62, 0.452, ... ], ... ],
    	[ [ -0.728, 1.1270663839088693, 1.232, -0.38, -1.348, -0.8137226248109883, 1.332, -0.548, ... ], [ 1.0830663839088692, -0.388, 0.944, 1.3306924309266455, -0.30117666262055126, -1.264, -0.86, -0.336, ... ], [ -1.96, 0.952, 0.112, -1.864, -1.892, 0.536, -0.844, -1.864, ... ], [ -1.576, 1.64846494668063, 1.8077270189047816, 1.4497288802697168, 0.468, -0.836, 0.092, -0.936, ... ], [ 0.288, -1.108, 0.681273911777686, -0.2532739117776861, -0.504, 1.6122729810952185, -0.508, -1.3, ... ], [ 0.212, -1.828, -0.216, -1.432, 0.16916549443093948, -0.592, -0.084, -0.19264067861871365, ... ], [ -0.508, 0.208, 1.7527379277758484, 0.448, -1.216, -1.564, 1.9546824527380462, -1.132, ... ], [ -0.52, -0.472, -1.632, -0.268, -1.216, 0.9100654532264016, -0.256, 1.7008526005813238, ... ], ... ],
    	[ [ -0.528, -0.16, 0.424, -0.904, 0.948, 1.702150191466079, -0.096, 1.5277943334961184, ... ], [ -1.912, -1.336, -0.536, 1.056, 1.2026687518195764, 1.152, -1.172, 0.348, ... ], [ 0.06, -1.224, -0.11974071982325146, 1.9577388584583162, 0.9611592389722108, 0.06, 0.9431137421230077, -0.824, ... ], [ 0.8904431289384962, -0.92, -1.812, -0.72, 0.252, -1.228, 0.772, -0.552, ... ], [ -1.496, 0.204, 0.708, 1.252, 1.6252676563189574, -0.588, -1.308, -0.796, ... ], [ -0.376, 0.876, 1.928, 0.404, 0.948, 1.8155923897221076, 1.8052165753749485, -1.204, ... ], [ -1.532, -1.216, -0.4, 0.064, -1.528, 1.9407852859899868, -1.852, 0.264, ... ], [ -1.072, 1.8384431289384962, -1.12, 1.144, 0.06, -1.076, 0.196, 1.6859345467735984, ... ], ... ],
    	[ [ -0.168, 1.7133175472619537, -1.684, 1.2251592389722108, -1.104, -0.616, 0.8876833834205138, -0.248, ... ], [ 1.7281583082897432, 1.22, 0.5530919243808737, 1.0, -1.48, 0.68, 0.784, 1.46, ... ], [ 0.1, 1.608, -0.78, -0.14, -1.684, -1.132, -1.248, -1.572, ... ], [ -0.616, 1.9019208458551284, -1.108, -0.636, -1.724, 1.327827990791787, -1.524, -0.038618860876579786, ... ], [ 0.656, 1.495932685408663, -1.112, 1.7435113742123007, -1.464, 0.724, 1.044, -0.644, ... ], [ 1.9349317547261953, -1.512, 1.9368862578769923, -1.192, 0.152, -1.868, 0.768, -0.03037581434715919, ... ], [ 1.2259981386350647, -1.856, 0.4648825351471217, 1.8897388584583161, -0.844, 0.936, -1.004, -1.608, ... ], [ 0.828, 1.7, 0.1, 0.576, 1.4406469340774422, -1.168, 0.9352246921986124, -0.068, ... ], ... ],
    	...
    ]

To produce the following output:

TrainingTester.java:633 executed in 0.10 seconds (0.000 gc):

    Result[] array = ConstantResult.batchResultArray(pop(RefUtil.addRef(data)));
    @Nullable
    Result eval = layer.eval(array);
    assert eval != null;
    TensorList tensorList = Result.getData(eval);
    String temp_18_0016 = tensorList.stream().limit(1).map(x -> {
      String temp_18_0017 = x.prettyPrint();
      x.freeRef();
      return temp_18_0017;
    }).reduce((a, b) -> a + "\n" + b).orElse("");
    tensorList.freeRef();
    return temp_18_0016;

Returns

    [
    	[ [ 1.7910464275316709, 0.4137251575398463, 1.6967753078013876, 1.9005696412975062, 1.2668544619462592, 1.594284820648753, 1.8558553926287267, 1.55529572951982, ... ], [ 1.1737961948610538, -0.19882452738046125, 1.7515350533193699, 1.0320199563771986, 0.5818161512382524, 1.952, -0.06685073921638852, 1.3752383931170824, ... ], [ 1.070989091128933, 1.6093985627717606, 1.86, 0.45458773630976934, 1.7837307416346522, 0.9560909936984061, 1.0029691347517344, 1.4309117983489967, ... ], [ 1.127386723218226, 0.3402019437740109, 1.7278753490059253, 0.699895305383124, 1.157452176444628, 0.9253512045576223, 1.3742374624346148, 0.6047379277758484, ... ], [ 1.8455232137658355, 1.7634203805138946, 0.9347597455179824, 1.3402829592838177, 1.0547834246250516, 1.8679227072200637, 1.3210445661667354, 1.50801370091847, ... ], [ 1.8608862578769925, 1.809010908871067, 1.6315687106150383, 1.5302374624346147, 1.4426251163353083, 1.5911037639344083, 0.9700317959307332, 1.2873430877339582, ... ], [ 1.825445920985899, 1.6322293456109507, 0.5770981798396025, 1.4632047358214137, 1.537354927287493, 1.0185515462852428, 0.9026487954423776, 1.588, ... ], [ 1.9147971255435214, 0.7773512045576223, 1.6891928962678793, 1.458045496849203, 1.1285459621904368, 1.3593194086268892, 0.8866388172537784, 1.1926569122660418, ... ], ... ],
    	[ [ 1.3553293868154885, 1.19060515995811, 1.8757270189047817, 0.08477158507151698, 1.252067314591337, 1.640330317497956, 1.6027061318451152, 1.748, ... ], [ 0.46711374212300766, 0.5192683276828802, 1.5580218177421339, 1.2638753490059254, 1.768, -0.2760137009184699, 1.9023184779444215, 1.80140854096036, ... ], [ 0.6944057489129571, 1.6061701478432777, 1.93459145903964, 1.550193826950347, 0.9964076102778925, 0.5834440596209638, 1.0723739529822238, -0.05055154628524283, ... ], [ 1.2292938681548846, -0.07555873242643914, 1.7558753490059253, 0.9623421570514907, 1.4438990281129946, 1.8534322200674294, 1.429499534658766, 0.9965933204045754, ... ], [ 1.62611281144054, 1.5098971667480592, 1.77270427048018, 1.6341638923845492, -0.9310301938843428, 1.299827990791787, 0.35432473340315024, 1.2414421982560284, ... ], [ 0.10520287445647854, 0.8041782646669418, 1.56423932379955, 1.3168426223927245, 1.22, 0.873303846343484, 1.2554340814323646, 1.976, ... ], [ -0.1705777581211701, 1.1996833834205138, 0.6673393650040877, 1.816, 1.3535095128473655, -1.0027534900592536, 1.2542648642715544, 1.026150191466079, ... ], [ 1.8630464275316707, 1.399784355307519, 0.5999526417858616, 1.5521009718870054, 1.4608363669339959, 1.866284820648753, 1.1652165753749484, 1.874763468247853, ... ], ... ],
    	[ [ 1.0830663839088692, 1.1270663839088693, 1.4311137421230076, 1.3306924309266455, -0.30117666262055126, -0.8137226248109883, 1.3858061730496531, 1.6170009306824675, ... ], [ 0.0742137833275455, 1.64846494668063, 1.8077270189047816, 1.4497288802697168, 1.0679526417858616, 0.7494858337402963, 1.7692165753749485, -0.24175628210665662, ... ], [ 1.0941065559818113, 0.5418597867225202, 0.681273911777686, -0.2532739117776861, 0.16916549443093948, 1.6122729810952185, 1.6629654120218638, -0.19264067861871365, ... ], [ 0.8268744183234578, 0.5076896388792425, 1.7527379277758484, 1.7573549272874929, 0.67505640572027, 0.9100654532264016, 1.9546824527380462, 1.7008526005813238, ... ], [ 1.8326942922915805, 1.2666687518195763, 0.5137824939425839, 1.8781028332519407, 1.4754876951052316, 1.805489556470167, 0.30605175230793175, 1.476330317497956, ... ], [ 1.900081015509807, 1.695161100337146, 1.208, 0.2926805913731108, 1.0813412263690232, 0.7182411851644852, 0.3667834246250516, 1.9154440596209639, ... ], [ 0.6100891323334708, 1.0569872297639977, 1.5793904459480967, 1.5841383519125445, 1.4571355598651414, 1.7712184367398836, -0.2792794958724919, -0.2899046122078004, ... ], [ 1.2306724745494468, 0.9364412675735608, 1.1598990281129946, 1.72635213524009, 1.2317369970933807, 1.2085796194861054, 1.1105204217184323, 1.3049099369840615, ... ], ... ],
    	[ [ 1.7281583082897432, 1.7133175472619537, 0.5530919243808737, 1.2251592389722108, 1.2026687518195764, 1.702150191466079, 0.8876833834205138, 1.5277943334961184, ... ], [ 0.8904431289384962, 1.9019208458551284, -0.11974071982325146, 1.9577388584583162, 0.9611592389722108, 1.327827990791787, 0.9431137421230077, -0.038618860876579786, ... ], [ 1.9349317547261953, 1.495932685408663, 1.9368862578769923, 1.7435113742123007, 1.6252676563189574, 1.8155923897221076, 1.8052165753749485, -0.03037581434715919, ... ], [ 1.2259981386350647, 1.8384431289384962, 0.4648825351471217, 1.8897388584583161, 1.4406469340774422, 1.9407852859899868, 0.9352246921986124, 1.6859345467735984, ... ], [ 1.2521346291826738, 1.7522530247180197, 1.6746487954423777, 1.736516027624639, 1.2506114154168384, 1.3486232549703732, 1.0252938681548847, 1.0754440596209638, ... ], [ 0.98142224187883, 0.22020194377401092, 1.6004549684920308, -0.08562790838271137, 0.8578398303453216, 1.1205659185676355, -0.010925499267466693, 1.3731355598651416, ... ], [ 1.87483078283919, 1.3283166165794862, 0.9220991105220702, 1.718500465341234, 1.1410208870596663, 1.3846070213230453, 1.2966032985931746, 1.7939781822578662, ... ], [ 1.6964549684920307, 1.9284886257876992, 1.020532261271967, 1.2136715438669792, 0.7591810567143447, 1.8931928962678792, 1.2630800848273391, 0.8433393650040878, ... ], ... ],
    	[ [ 0.8860891323334709, 1.0523402956865555, 1.066884396512057, 1.2141464687362085, 1.8493312481804236, 1.513954503150797, 1.549938269503469, 1.8233767450296268, ... ], [ 0.6794540378095631, 0.7129062142541909, 1.253685244785449, 1.683346810463829, 0.9688762796883931, 1.807467738728033, 1.4649099369840615, 1.5005496849203075, ... ], [ 0.03906266117899873, 1.7293312481804237, 0.931123720311607, -0.09696727338679917, 1.7986724745494467, 0.892532261271967, 1.1678990281129946, 1.225513235577236, ... ], [ 0.7908507392163885, 1.2153967014068254, 1.6442156446924807, 1.6122330683408213, 1.6108644401348584, 1.35553505331937, 1.8000037227298706, 1.3239900218114007, ... ], [ 1.5490682452738047, -0.17244126757356087, 1.1429753902104631, 1.6696515874897806, 1.4291430053248828, 1.7644886257876993, 0.11480338100225017, 1.9039663427043314, ... ], [ 1.0689572951981998, 1.1262137833275454, 1.535123720311607, 1.88988718855946, 0.38755500969656853, 1.422284820648753, -0.01893547745606597, 1.5256279083827113, ... ], [ 1.4328426223927246, 0.8743284561330208, 1.4485023267061692, -0.2652776345075567, 0.02220008240907559, 1.8412402544820174, -0.6996833834205138, 1.9640909936984061, ... ], [ 1.42871797139865, -0.4489572951981998, 1.61953505331937, 0.9362966602022876, 0.9644986039762986, 0.7696678211371086, 1.8348208046505907, -0.0443676975234952, ... ], ... ],
    	[ [ 1.194763468247853, 0.3321782646669417, 1.4170445661667352, 1.2605222830833678, 0.8528625787699231, 0.972, 1.5508781410533283, 1.8003303174979561, ... ], [ 1.3441146728054754, 1.635865370817326, 1.5873293868154885, 0.5706450727125071, 1.7664767862341646, 0.6013375036391525, 0.4234739941867617, 1.2764986039762984, ... ], [ 1.6240673145913371, 0.31917107852574533, 1.6752720504127507, 1.1646469340774424, 1.8001819873968123, 0.23104642753167076, 1.463932685408663, 0.7169062142541909, ... ], [ 1.2234640159981622, 0.5658261294268517, 1.9038180126031876, 1.7017862166724544, 1.9916023679107069, 1.7020454968492031, -0.25109750847567974, 0.9763502738751547, ... ], [ -0.3158017789558597, 1.0199900218114009, 0.8930782234624038, 1.6315450315079694, 0.22264879544237764, 1.7960336572956686, 1.3903857925357586, 0.36229107610748174, ... ], [ 1.3315687106150385, 1.6780691759562723, 1.6742611415416837, 1.4792184367398837, 1.7210009306824676, -0.06626486427155445, 1.1732365317521471, 1.474510443529833, ... ], [ 1.2613212699918244, 1.8890919243808737, 0.7496915002441779, 1.8777388584583161, 1.4571156034879431, 1.0367616068829177, 1.6499881604464652, 0.4506887081967748, ... ], [ 1.0042156446924808, 1.7477270189047815, 1.3457999175909243, 1.5810956471107445, -0.5565596631089067, 1.8541838487617475, 1.3376752665968499, 1.0826014372282393, ... ], ... ],
    	[ [ 1.3836460033949747, 1.924967273386799, 1.4931928962678793, 1.90377437711892, 0.25357682743870247, 0.4123365729566848, 1.8440909936984062, 1.875386723218226, ... ], [ 1.0244786475991, 1.006793402813651, 1.9297725157539847, 0.5828607174049878, 1.9134658773630977, 1.8448663014997937, 1.165806173049653, 1.6994141250551658, ... ], [ 1.184, 1.6291729398906807, 1.545513235577236, 1.2651928962678793, 1.6114876951052317, 1.1732975908847554, 0.44959052835717234, 1.938045496849203, ... ], [ 1.10988718855946, 1.684, 0.8743658361585599, 1.6087379277758487, 1.0951000412045377, 1.3952483713056816, 1.908, 1.3394640159981623, ... ], [ 1.2432284149284831, 0.8562019437740109, -0.471825458062929, 1.8234203805138947, 1.1850208870596664, 1.6583758143471592, 1.184838899662854, 1.6777151793512468, ... ], [ 0.196, 1.5233904459480967, 1.8890682452738046, 1.0191336985002064, 1.8036260470177763, 1.424, 1.9107734464364523, 1.8294658773630978, ... ], [ 1.6256378865713106, 1.0838043116847178, 1.2443166165794861, 0.3962692583653479, 0.3013611827462217, 0.8313767450296269, 1.1940218177421338, 1.8805796194861053, ... ], [ 1.8612639335890868, 1.646443128938496, 1.1796734052319144, 1.6491255816765422, 1.634059197767673, 1.360215644692481, 0.8922929374724171, 1.1401246509940748, ... ], ... ],
    	[ [ -0.21899281385880362, 1.6975705719799739, 0.2753019849785487, 0.4814048182304894, 1.8951037639344084, 1.38140854096036, 1.5131355598651415, 1.472320339309357, ... ], [ 1.286419449831427, 1.7179719267991373, 1.3127616068829178, 1.05481708192072, -0.11353691468430524, 1.3141464687362086, 1.7900492195790736, 1.912397632089293, ... ], [ -0.18980989577952373, 1.5952720504127509, 1.5171019025694732, 1.211918984490193, 0.7824294280200264, 0.16573885845831612, 1.2283502738751546, 1.1803639747936245, ... ], [ 1.5718753490059254, 1.4008563233111946, -0.12363974793624599, 1.2912383931170823, 0.4741165341704106, 1.8614658773630979, 1.736, 1.6128762796883929, ... ], [ 0.5493175472619539, 1.8043776757120944, 1.0291355598651415, 1.402935477456066, 1.3875687106150383, 1.2753057077084193, 1.8580255404720045, 1.6926706131845115, ... ], [ 1.9510464275316708, 1.7700791541448715, 1.5610445661667354, 1.815252094035552, 1.835798056225989, 1.4181028332519408, 1.0063758143471593, 1.6365596631089068, ... ], [ 1.0139881604464653, 0.9905341226369023, 1.8655232137658353, 1.8789317547261952, 1.6594440596209639, -0.3594477823508344, 1.6583421570514907, 0.852, ... ], [ 1.6879563645157323, 1.7413786063945622, 1.5671411439599474, 1.4741738705731482, 1.7686706131845116, 1.5301701478432777, 1.9330919243808737, 1.6069080756191263, ... ], ... ],
    	...
    ]

TrainingTester.java:432 executed in 0.15 seconds (0.000 gc):

    return TestUtil.compare(title + " vs Iteration", runs);
Logging
Plotting range=[0.0, -0.8469752975549875], [2.0, -0.17710309221847387]; valueStats=DoubleSummaryStatistics{count=3, sum=1.472471, min=0.142241, average=0.490824, max=0.665115}
Only 1 points for GD
Only 1 points for CjGD
Only 1 points for LBFGS

Returns

Result

TrainingTester.java:435 executed in 0.02 seconds (0.000 gc):

    return TestUtil.compareTime(title + " vs Time", runs);
Logging
Plotting range=[-1.0, -0.8469752975549875], [1.0, -0.17710309221847387]; valueStats=DoubleSummaryStatistics{count=3, sum=1.472471, min=0.142241, average=0.490824, max=0.665115}
Only 1 points for GD
Only 1 points for CjGD
Only 1 points for LBFGS

Returns

Result

Results

TrainingTester.java:255 executed in 0.00 seconds (0.000 gc):

    return grid(inputLearning, modelLearning, completeLearning);

Returns

Result

TrainingTester.java:258 executed in 0.00 seconds (0.000 gc):

    return new ComponentResult(null == inputLearning ? null : inputLearning.value,
        null == modelLearning ? null : modelLearning.value, null == completeLearning ? null : completeLearning.value);

Returns

    {"input":{ "LBFGS": { "type": "NonConverged", "value": 0.6651152533297944 }, "CjGD": { "type": "NonConverged", "value": 0.14224096907478184 }, "GD": { "type": "NonConverged", "value": 0.6651152533297944 } }, "model":null, "complete":null}

LayerTests.java:425 executed in 0.00 seconds (0.000 gc):

    throwException(exceptions.addRef());

Results

details | result
{"input":{ "LBFGS": { "type": "NonConverged", "value": 0.6651152533297944 }, "CjGD": { "type": "NonConverged", "value": 0.14224096907478184 }, "GD": { "type": "NonConverged", "value": 0.6651152533297944 } }, "model":null, "complete":null} | OK
  {
    "result": "OK",
    "performance": {
      "execution_time": "197.795",
      "gc_time": "17.290"
    },
    "created_on": 1586735917409,
    "file_name": "trainingTest",
    "report": {
      "simpleName": "Double",
      "canonicalName": "com.simiacryptus.mindseye.layers.cudnn.PoolingLayerTest.Double",
      "link": "https://github.com/SimiaCryptus/mindseye-cudnn/tree/59d5b3318556370acb2d83ee6ec123ce0fc6974f/src/test/java/com/simiacryptus/mindseye/layers/cudnn/PoolingLayerTest.java",
      "javaDoc": ""
    },
    "training_analysis": {
      "input": {
        "LBFGS": {
          "type": "NonConverged",
          "value": 0.6651152533297944
        },
        "CjGD": {
          "type": "NonConverged",
          "value": 0.14224096907478184
        },
        "GD": {
          "type": "NonConverged",
          "value": 0.6651152533297944
        }
      }
    },
    "archive": "s3://code.simiacrypt.us/tests/com/simiacryptus/mindseye/layers/cudnn/PoolingLayer/Double/trainingTest/202004125837",
    "id": "45535e1b-373d-43a7-8866-f570b464fa94",
    "report_type": "Components",
    "display_name": "Comparative Training",
    "target": {
      "simpleName": "PoolingLayer",
      "canonicalName": "com.simiacryptus.mindseye.layers.cudnn.PoolingLayer",
      "link": "https://github.com/SimiaCryptus/mindseye-cudnn/tree/59d5b3318556370acb2d83ee6ec123ce0fc6974f/src/main/java/com/simiacryptus/mindseye/layers/cudnn/PoolingLayer.java",
      "javaDoc": ""
    }
  }