1. Test Modules
  2. Training Characteristics
    1. Input Learning
      1. Gradient Descent
      2. Conjugate Gradient Descent
      3. Limited-Memory BFGS
    2. Results
  3. Results

Subreport: Logs for com.simiacryptus.ref.lang.ReferenceCountingBase

Test Modules

Using Seed 4220312915995452416

Training Characteristics

Input Learning

In this test, we use a network to learn this target input, given its pre-evaluated output.
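
Conceptually, input learning holds the layer's weights fixed and treats the input tensors themselves as the trainable variables: the optimizer adjusts them until the layer's output matches the pre-evaluated target. The sketch below illustrates the idea with a toy element-wise layer and plain gradient steps; the class and method names are hypothetical and this is not the TrainingTester API.

    import java.util.Arrays;

    public class InputLearningSketch {
      // Toy frozen "layer": element-wise doubling. It plays the role of the fixed
      // network whose weights are not trained during input learning.
      static double[] forward(double[] input) {
        return Arrays.stream(input).map(v -> 2 * v).toArray();
      }

      public static void main(String[] args) {
        double[] targetInput = {1.592, -1.932, -1.22};    // a few values echoing the listing below
        double[] targetOutput = forward(targetInput);     // the pre-evaluated output to reproduce
        double[] input = new double[targetInput.length];  // learnable input, initialized to zero
        double rate = 0.1;
        for (int iter = 0; iter < 200; iter++) {
          double[] out = forward(input);
          // gradient of 0.5 * sum((out - targetOutput)^2) w.r.t. input, for out = 2 * input
          for (int i = 0; i < input.length; i++) {
            input[i] -= rate * 2 * (out[i] - targetOutput[i]);
          }
        }
        System.out.println(Arrays.toString(input));       // converges back toward the target input
      }
    }

The target inputs for this run, whose pre-evaluated outputs define the objective, are listed below.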

TrainingTester.java:332 executed in 0.07 seconds (0.000 gc):

    return RefArrays.stream(RefUtil.addRef(input_target)).flatMap(RefArrays::stream).map(x -> {
      try {
        return x.prettyPrint();
      } finally {
        x.freeRef();
      }
    }).reduce((a, b) -> a + "\n" + b).orElse("");

Returns

    [
    	[ [ 1.592 ], [ -1.932 ], [ -1.22 ], [ 0.092 ], [ 0.012 ], [ -0.676 ], [ -0.512 ], [ 0.716 ], ... ],
    	[ [ 0.268 ], [ -1.524 ], [ 1.056 ], [ -1.544 ], [ -0.564 ], [ -0.312 ], [ -1.964 ], [ -1.148 ], ... ],
    	[ [ -0.188 ], [ -0.588 ], [ -1.28 ], [ 0.196 ], [ -1.404 ], [ -0.352 ], [ 1.964 ], [ -1.212 ], ... ],
    	[ [ -1.784 ], [ -0.276 ], [ -1.34 ], [ 1.748 ], [ 1.776 ], [ 1.62 ], [ -1.188 ], [ 1.76 ], ... ],
    	[ [ -1.744 ], [ 1.356 ], [ -0.968 ], [ -0.868 ], [ 0.236 ], [ 1.268 ], [ -0.396 ], [ -1.912 ], ... ],
    	[ [ 1.056 ], [ 0.42 ], [ 1.504 ], [ 1.876 ], [ -1.596 ], [ -1.564 ], [ -0.484 ], [ 0.58 ], ... ],
    	[ [ -1.632 ], [ 1.332 ], [ 0.556 ], [ -0.86 ], [ -0.492 ], [ 0.668 ], [ -1.988 ], [ -1.98 ], ... ],
    	[ [ 0.072 ], [ 1.324 ], [ -1.816 ], [ -1.572 ], [ -0.076 ], [ -0.552 ], [ -1.296 ], [ -1.804 ], ... ],
    	...
    ]
    [
    	[ [ 1.232 ], [ -1.064 ], [ -1.144 ], [ -1.716 ], [ 0.088 ], [ 1.224 ], [ 1.884 ], [ 0.576 ], ... ],
    	[ [ 0.584 ], [ -1.536 ], [ -1.356 ], [ 1.196 ], [ 0.684 ], [ -1.888 ], [ -1.892 ], [ 1.04 ], ... ],
    	[ [ 0.632 ], [ 0.876 ], [ -0.356 ], [ 1.988 ], [ 0.864 ], [ 0.004 ], [ 1.784 ], [ -0.456 ], ... ],
    	[ [ -1.22 ], [ 1.268 ], [ 1.828 ], [ -0.784 ], [ -0.492 ], [ 1.836 ], [ 1.512 ], [ 0.64 ], ... ],
    	[ [ 0.232 ], [ 0.1 ], [ 0.548 ], [ -0.152 ], [ 1.612 ], [ 0.532 ], [ -1.672 ], [ -1.632 ], ... ],
    	[ [ 0.188 ], [ -1.912 ], [ 1.256 ], [ -0.72 ], [ -1.032 ], [ -1.856 ], [ -0.16 ], [ -0.448 ], ... ],
    	[ [ -1.396 ], [ -1.608 ], [ -0.98 ], [ 1.276 ], [ 0.06 ], [ -1.624 ], [ 0.388 ], [ -0.348 ], ... ],
    	[ [ 1.26 ], [ 1.116 ], [ -0.02 ], [ 1.416 ], [ 1.32 ], [ 0.3 ], [ -0.668 ], [ -1.224 ], ... ],
    	...
    ]
    [
    	[ [ -0.184 ], [ -1.528 ], [ -0.816 ], [ 1.284 ], [ -1.692 ], [ -0.924 ], [ 0.768 ], [ -0.76 ], ... ],
    	[ [ 1.968 ], [ 1.776 ], [ 1.92 ], [ -1.484 ], [ -1.496 ], [ -1.488 ], [ 0.884 ], [ -1.652 ], ... ],
    	[ [ 0.696 ], [ 0.224 ], [ 0.716 ], [ -1.22 ], [ 0.84 ], [ 1.668 ], [ 0.012 ], [ -1.108 ], ... ],
    	[ [ 1.948 ], [ -1.472 ], [ 0.16 ], [ 1.1 ], [ -1.708 ], [ 0.224 ], [ 1.604 ], [ 1.912 ], ... ],
    	[ [ -1.66 ], [ 0.884 ], [ -0.908 ], [ 0.196 ], [ -0.628 ], [ -1.916 ], [ 0.076 ], [ -1.548 ], ... ],
    	[ [ -0.924 ], [ -1.072 ], [ 0.196 ], [ -1.048 ], [ -1.772 ], [ -0.304 ], [ -1.476 ], [ 1.32 ], ... ],
    	[ [ 0.696 ], [ -1.112 ], [ 0.648 ], [ -0.192 ], [ -1.868 ], [ -0.22 ], [ -1.676 ], [ -1.116 ], ... ],
    	[ [ 1.336 ], [ -1.248 ], [ -1.272 ], [ 1.528 ], [ 1.688 ], [ -1.512 ], [ -0.424 ], [ -1.08 ], ... ],
    	...
    ]
    [
    	[ [ -0.616 ], [ 1.732 ], [ 0.54 ], [ 0.244 ], [ 0.912 ], [ 0.308 ], [ 1.316 ], [ 1.984 ], ... ],
    	[ [ 1.192 ], [ -1.312 ], [ 1.88 ], [ 1.94 ], [ 1.74 ], [ -1.924 ], [ -0.944 ], [ -1.64 ], ... ],
    	[ [ 0.936 ], [ 0.4 ], [ 1.172 ], [ -0.688 ], [ -0.664 ], [ 0.48 ], [ -0.34 ], [ 0.924 ], ... ],
    	[ [ -1.252 ], [ -0.136 ], [ 0.484 ], [ -1.68 ], [ 0.084 ], [ 0.9 ], [ -1.192 ], [ -0.748 ], ... ],
    	[ [ -0.872 ], [ -0.276 ], [ 0.76 ], [ 0.184 ], [ 1.34 ], [ 1.3 ], [ -1.496 ], [ 0.148 ], ... ],
    	[ [ 1.896 ], [ 0.028 ], [ 0.032 ], [ -1.004 ], [ -1.176 ], [ 1.004 ], [ 0.148 ], [ 0.72 ], ... ],
    	[ [ -0.06 ], [ -0.328 ], [ 1.364 ], [ 1.108 ], [ -1.832 ], [ 1.628 ], [ 0.548 ], [ 1.984 ], ... ],
    	[ [ -1.544 ], [ 1.332 ], [ -1.104 ], [ -0.556 ], [ -0.464 ], [ -0.464 ], [ -1.572 ], [ 1.328 ], ... ],
    	...
    ]
    [
    	[ [ 1.988 ], [ -1.364 ], [ -1.08 ], [ -1.08 ], [ -0.268 ], [ 1.704 ], [ -1.308 ], [ -1.664 ], ... ],
    	[ [ -0.256 ], [ -1.024 ], [ -1.736 ], [ 1.132 ], [ -1.344 ], [ 0.404 ], [ 1.132 ], [ -1.796 ], ... ],
    	[ [ -1.228 ], [ 0.728 ], [ -0.78 ], [ 0.628 ], [ 1.724 ], [ 1.7 ], [ 1.236 ], [ -0.528 ], ... ],
    	[ [ -0.556 ], [ 0.68 ], [ -1.836 ], [ 1.168 ], [ 0.924 ], [ 1.548 ], [ -0.88 ], [ -1.508 ], ... ],
    	[ [ 1.668 ], [ 1.3 ], [ 0.516 ], [ 0.484 ], [ -1.536 ], [ 1.564 ], [ -0.42 ], [ -0.86 ], ... ],
    	[ [ -0.352 ], [ 1.668 ], [ 1.216 ], [ 0.968 ], [ 0.568 ], [ -0.728 ], [ -1.784 ], [ -1.428 ], ... ],
    	[ [ -0.676 ], [ -0.816 ], [ -0.852 ], [ -0.22 ], [ -1.2 ], [ 1.048 ], [ -0.68 ], [ -0.168 ], ... ],
    	[ [ 1.664 ], [ 0.948 ], [ 1.18 ], [ 1.256 ], [ -1.588 ], [ 1.068 ], [ 1.528 ], [ 0.26 ], ... ],
    	...
    ]
    [
    	[ [ 0.344 ], [ -1.724 ], [ -0.316 ], [ -1.036 ], [ -0.24 ], [ -0.552 ], [ -0.216 ], [ -1.152 ], ... ],
    	[ [ -0.236 ], [ 1.808 ], [ -1.352 ], [ -0.104 ], [ 0.632 ], [ 1.472 ], [ 1.888 ], [ 1.924 ], ... ],
    	[ [ -1.404 ], [ 1.732 ], [ 0.484 ], [ -0.56 ], [ -1.736 ], [ 1.296 ], [ 0.536 ], [ 0.08 ], ... ],
    	[ [ 0.74 ], [ -0.508 ], [ -0.908 ], [ 1.82 ], [ -0.064 ], [ 1.184 ], [ -0.172 ], [ 0.064 ], ... ],
    	[ [ -1.232 ], [ -0.22 ], [ 0.84 ], [ 1.716 ], [ -0.996 ], [ -1.116 ], [ -0.792 ], [ -0.272 ], ... ],
    	[ [ 0.78 ], [ 1.536 ], [ 0.72 ], [ 1.616 ], [ 0.972 ], [ -0.468 ], [ -0.228 ], [ -1.704 ], ... ],
    	[ [ -0.82 ], [ 0.424 ], [ 1.872 ], [ -1.592 ], [ -0.808 ], [ -0.232 ], [ 0.952 ], [ -1.68 ], ... ],
    	[ [ 0.468 ], [ 1.02 ], [ -0.624 ], [ 1.332 ], [ 0.492 ], [ -0.732 ], [ -1.792 ], [ -0.444 ], ... ],
    	...
    ]
    [
    	[ [ 0.904 ], [ 1.548 ], [ -0.956 ], [ -1.824 ], [ 0.8 ], [ 1.152 ], [ 0.768 ], [ 0.42 ], ... ],
    	[ [ 0.78 ], [ 0.492 ], [ 0.564 ], [ -0.308 ], [ -1.084 ], [ -0.004 ], [ -1.044 ], [ 2.0 ], ... ],
    	[ [ 0.136 ], [ 0.44 ], [ -0.608 ], [ 1.288 ], [ -0.652 ], [ 1.392 ], [ -1.836 ], [ 1.664 ], ... ],
    	[ [ -0.036 ], [ 0.516 ], [ -0.792 ], [ -0.964 ], [ 0.692 ], [ -1.68 ], [ 0.124 ], [ -1.292 ], ... ],
    	[ [ 0.384 ], [ 0.344 ], [ -0.38 ], [ 1.2 ], [ -0.94 ], [ 1.996 ], [ -1.06 ], [ -0.612 ], ... ],
    	[ [ -1.856 ], [ -0.932 ], [ 1.668 ], [ 0.064 ], [ 0.22 ], [ 1.856 ], [ -1.632 ], [ 0.048 ], ... ],
    	[ [ -0.2 ], [ -0.116 ], [ -0.228 ], [ 1.784 ], [ 1.164 ], [ -0.736 ], [ 1.02 ], [ -0.052 ], ... ],
    	[ [ -1.052 ], [ -0.74 ], [ -0.372 ], [ 0.068 ], [ 0.376 ], [ -1.16 ], [ -0.532 ], [ -1.816 ], ... ],
    	...
    ]
    [
    	[ [ 0.932 ], [ -1.412 ], [ 1.036 ], [ 1.536 ], [ 0.496 ], [ 1.588 ], [ 0.26 ], [ -0.932 ], ... ],
    	[ [ 0.992 ], [ 0.364 ], [ 0.5 ], [ -1.536 ], [ 1.992 ], [ 0.68 ], [ 1.532 ], [ 0.916 ], ... ],
    	[ [ -0.98 ], [ -0.5 ], [ 0.392 ], [ -0.744 ], [ 0.688 ], [ -0.708 ], [ 0.06 ], [ 1.204 ], ... ],
    	[ [ -1.836 ], [ 1.768 ], [ 0.232 ], [ 0.768 ], [ 0.224 ], [ 0.436 ], [ -1.304 ], [ -0.128 ], ... ],
    	[ [ -0.484 ], [ 0.248 ], [ 0.392 ], [ 0.908 ], [ 0.932 ], [ 0.388 ], [ -0.62 ], [ -1.856 ], ... ],
    	[ [ 0.664 ], [ -1.212 ], [ -0.028 ], [ 0.496 ], [ -0.16 ], [ 1.964 ], [ 0.636 ], [ 0.032 ], ... ],
    	[ [ 0.548 ], [ -0.844 ], [ -1.3 ], [ -0.352 ], [ -1.808 ], [ 0.732 ], [ 1.156 ], [ -1.928 ], ... ],
    	[ [ -0.144 ], [ 0.812 ], [ 0.004 ], [ -0.364 ], [ -1.648 ], [ -0.632 ], [ -1.564 ], [ 1.064 ], ... ],
    	...
    ]
    [
    	[ [ -0.968 ], [ 1.236 ], [ -0.86 ], [ -0.772 ], [ 1.72 ], [ -0.268 ], [ -1.16 ], [ -1.38 ], ... ],
    	[ [ 1.232 ], [ 1.236 ], [ 0.544 ], [ 1.672 ], [ -0.092 ], [ -1.568 ], [ 1.02 ], [ -0.372 ], ... ],
    	[ [ -0.124 ], [ 0.692 ], [ 0.664 ], [ -0.276 ], [ -1.872 ], [ 1.408 ], [ 1.752 ], [ -0.344 ], ... ],
    	[ [ 1.788 ], [ 1.82 ], [ -0.144 ], [ -0.22 ], [ -0.468 ], [ 1.264 ], [ -1.42 ], [ 1.484 ], ... ],
    	[ [ -0.084 ], [ 1.4 ], [ -1.876 ], [ 0.816 ], [ -1.024 ], [ -0.036 ], [ 0.132 ], [ 0.448 ], ... ],
    	[ [ 1.784 ], [ -1.96 ], [ 0.132 ], [ 1.06 ], [ 1.968 ], [ -1.616 ], [ -1.928 ], [ -0.2 ], ... ],
    	[ [ 0.652 ], [ -0.428 ], [ 0.88 ], [ 0.16 ], [ 1.264 ], [ -1.448 ], [ 0.296 ], [ 0.22 ], ... ],
    	[ [ -1.812 ], [ 0.856 ], [ 0.852 ], [ -0.144 ], [ -1.264 ], [ -1.376 ], [ -1.664 ], [ 1.82 ], ... ],
    	...
    ]
    [
    	[ [ -1.164 ], [ -0.812 ], [ -0.212 ], [ -1.12 ], [ -0.644 ], [ 0.448 ], [ 0.96 ], [ -0.136 ], ... ],
    	[ [ 0.296 ], [ -1.604 ], [ -0.372 ], [ -1.428 ], [ -0.332 ], [ 0.236 ], [ -0.74 ], [ 1.124 ], ... ],
    	[ [ 1.496 ], [ -0.448 ], [ -0.264 ], [ -0.676 ], [ 1.232 ], [ 1.872 ], [ 1.884 ], [ -0.828 ], ... ],
    	[ [ 1.308 ], [ 1.516 ], [ -0.256 ], [ 0.892 ], [ -1.4 ], [ -1.512 ], [ -1.084 ], [ 1.48 ], ... ],
    	[ [ -0.524 ], [ -1.7 ], [ 0.484 ], [ 1.772 ], [ 1.124 ], [ 1.684 ], [ -1.152 ], [ -0.756 ], ... ],
    	[ [ 1.336 ], [ -1.368 ], [ 0.036 ], [ 0.084 ], [ -1.232 ], [ -1.904 ], [ 0.456 ], [ 0.12 ], ... ],
    	[ [ -0.948 ], [ -0.424 ], [ 0.548 ], [ 0.18 ], [ -0.988 ], [ -0.74 ], [ -0.536 ], [ -1.116 ], ... ],
    	[ [ 1.264 ], [ -0.776 ], [ 1.196 ], [ 0.068 ], [ 0.676 ], [ 1.632 ], [ 1.912 ], [ -0.348 ], ... ],
    	...
    ]
    [
    	[ [ -0.724 ], [ 0.732 ], [ 1.232 ], [ -1.436 ], [ -1.448 ], [ -1.052 ], [ 0.7

...skipping 3855 bytes...

    24 ], [ -0.94 ], [ 0.36 ], [ 0.812 ], [ -0.22 ], [ 0.212 ], [ 1.34 ], ... ],
    	...
    ]
    [
    	[ [ -1.704 ], [ 1.828 ], [ 1.956 ], [ 1.496 ], [ 0.552 ], [ -1.608 ], [ -0.08 ], [ 0.316 ], ... ],
    	[ [ -0.188 ], [ -1.508 ], [ 1.908 ], [ -0.288 ], [ 0.896 ], [ -1.756 ], [ -0.488 ], [ 0.864 ], ... ],
    	[ [ 0.256 ], [ -1.448 ], [ 0.948 ], [ 1.764 ], [ 1.884 ], [ 0.204 ], [ -1.7 ], [ -1.624 ], ... ],
    	[ [ 0.156 ], [ -0.412 ], [ -1.776 ], [ -0.6 ], [ 1.828 ], [ -0.836 ], [ -0.824 ], [ -1.052 ], ... ],
    	[ [ 1.248 ], [ -1.272 ], [ 1.76 ], [ -1.992 ], [ 0.96 ], [ -1.056 ], [ 0.448 ], [ -0.556 ], ... ],
    	[ [ -0.948 ], [ 1.08 ], [ 1.828 ], [ -1.556 ], [ -0.996 ], [ -1.236 ], [ -1.856 ], [ 0.884 ], ... ],
    	[ [ -0.472 ], [ 1.112 ], [ 0.716 ], [ -1.936 ], [ -0.036 ], [ -1.472 ], [ -1.64 ], [ 1.476 ], ... ],
    	[ [ -0.436 ], [ -0.444 ], [ 1.616 ], [ 1.928 ], [ 1.94 ], [ 0.84 ], [ -0.004 ], [ 1.212 ], ... ],
    	...
    ]
    [
    	[ [ -0.844 ], [ -1.908 ], [ 0.504 ], [ -0.328 ], [ 1.572 ], [ -0.388 ], [ -1.252 ], [ -0.66 ], ... ],
    	[ [ -1.216 ], [ -1.4 ], [ 0.768 ], [ 1.776 ], [ 0.652 ], [ 0.732 ], [ 0.832 ], [ 0.284 ], ... ],
    	[ [ 0.968 ], [ 1.28 ], [ -0.364 ], [ -0.008 ], [ 0.54 ], [ -0.368 ], [ -0.056 ], [ 1.764 ], ... ],
    	[ [ 0.656 ], [ 1.688 ], [ 0.188 ], [ 0.064 ], [ -1.2 ], [ -0.18 ], [ 0.488 ], [ -0.404 ], ... ],
    	[ [ 1.52 ], [ 1.968 ], [ 1.076 ], [ -0.152 ], [ 0.604 ], [ -1.056 ], [ 1.204 ], [ -0.764 ], ... ],
    	[ [ 0.164 ], [ -1.244 ], [ 1.284 ], [ -1.36 ], [ -0.372 ], [ -0.756 ], [ -0.46 ], [ 1.284 ], ... ],
    	[ [ 1.16 ], [ -1.66 ], [ -1.176 ], [ 1.78 ], [ -1.228 ], [ -1.392 ], [ -1.68 ], [ -1.776 ], ... ],
    	[ [ 0.284 ], [ 0.008 ], [ -0.764 ], [ -1.864 ], [ -1.76 ], [ 1.336 ], [ 1.016 ], [ 0.392 ], ... ],
    	...
    ]
    [
    	[ [ -0.864 ], [ -0.004 ], [ 1.164 ], [ -0.168 ], [ -1.476 ], [ -0.832 ], [ 1.54 ], [ -1.676 ], ... ],
    	[ [ 0.132 ], [ 0.296 ], [ 0.924 ], [ -0.172 ], [ -0.7 ], [ 1.556 ], [ 1.904 ], [ 0.264 ], ... ],
    	[ [ 0.844 ], [ 0.6 ], [ -1.788 ], [ -1.608 ], [ 0.052 ], [ 0.196 ], [ 1.108 ], [ -1.12 ], ... ],
    	[ [ -0.936 ], [ -0.88 ], [ 0.476 ], [ -1.912 ], [ 1.08 ], [ 1.912 ], [ -1.44 ], [ -0.604 ], ... ],
    	[ [ -0.12 ], [ -1.26 ], [ -0.924 ], [ -1.816 ], [ -0.924 ], [ -1.668 ], [ 1.208 ], [ 0.568 ], ... ],
    	[ [ 1.248 ], [ -0.408 ], [ -0.184 ], [ 0.3 ], [ 1.532 ], [ -1.62 ], [ 0.26 ], [ -1.448 ], ... ],
    	[ [ -0.644 ], [ 1.576 ], [ 1.988 ], [ 0.504 ], [ -0.86 ], [ -1.188 ], [ -1.228 ], [ -1.636 ], ... ],
    	[ [ 1.248 ], [ -1.376 ], [ 0.724 ], [ -0.392 ], [ 1.584 ], [ 0.024 ], [ -1.732 ], [ -1.412 ], ... ],
    	...
    ]
    [
    	[ [ 1.952 ], [ 0.832 ], [ 0.172 ], [ 1.796 ], [ -0.184 ], [ -0.916 ], [ 0.848 ], [ -1.6 ], ... ],
    	[ [ -1.388 ], [ 1.576 ], [ -1.94 ], [ -1.532 ], [ 0.568 ], [ -0.968 ], [ 0.396 ], [ 0.136 ], ... ],
    	[ [ -0.92 ], [ 1.008 ], [ 0.056 ], [ -1.316 ], [ 0.336 ], [ 0.94 ], [ -1.9 ], [ -1.276 ], ... ],
    	[ [ 0.644 ], [ 0.592 ], [ -1.268 ], [ 1.94 ], [ 1.464 ], [ 1.848 ], [ -0.42 ], [ 1.064 ], ... ],
    	[ [ 1.396 ], [ 0.512 ], [ 1.612 ], [ -0.38 ], [ -1.408 ], [ 1.572 ], [ -1.668 ], [ 0.58 ], ... ],
    	[ [ -0.34 ], [ -0.356 ], [ -0.464 ], [ 1.42 ], [ -0.216 ], [ -1.576 ], [ 0.896 ], [ -1.488 ], ... ],
    	[ [ -0.42 ], [ 0.288 ], [ 1.72 ], [ -1.26 ], [ 0.412 ], [ -1.788 ], [ 2.0 ], [ -0.248 ], ... ],
    	[ [ 0.636 ], [ 0.308 ], [ -1.24 ], [ 0.876 ], [ 0.076 ], [ -0.776 ], [ 0.156 ], [ -0.74 ], ... ],
    	...
    ]
    [
    	[ [ 1.5 ], [ 1.044 ], [ -0.384 ], [ 1.968 ], [ -1.168 ], [ -0.88 ], [ 0.472 ], [ -1.68 ], ... ],
    	[ [ 1.476 ], [ 1.044 ], [ -1.572 ], [ -1.36 ], [ -0.112 ], [ 0.756 ], [ -0.624 ], [ 1.452 ], ... ],
    	[ [ 0.056 ], [ 0.78 ], [ -1.564 ], [ -0.152 ], [ 1.884 ], [ -0.984 ], [ 0.724 ], [ 0.812 ], ... ],
    	[ [ 0.228 ], [ 1.412 ], [ 0.712 ], [ 1.704 ], [ -1.008 ], [ -0.396 ], [ -1.916 ], [ 0.176 ], ... ],
    	[ [ 0.784 ], [ 0.508 ], [ 1.228 ], [ -0.204 ], [ -1.848 ], [ 0.116 ], [ 0.368 ], [ 0.02 ], ... ],
    	[ [ -0.876 ], [ 1.872 ], [ -1.212 ], [ -1.204 ], [ 1.516 ], [ -0.82 ], [ -1.708 ], [ -1.948 ], ... ],
    	[ [ -0.052 ], [ 0.0 ], [ -0.484 ], [ 0.988 ], [ 0.924 ], [ -1.796 ], [ -0.752 ], [ 1.012 ], ... ],
    	[ [ -0.292 ], [ -1.932 ], [ 1.248 ], [ -1.38 ], [ -0.64 ], [ -1.056 ], [ -0.484 ], [ -0.376 ], ... ],
    	...
    ]
    [
    	[ [ 1.536 ], [ -0.688 ], [ -1.064 ], [ 0.072 ], [ 1.028 ], [ -0.496 ], [ -0.82 ], [ -1.636 ], ... ],
    	[ [ -0.22 ], [ -1.272 ], [ 0.912 ], [ 0.776 ], [ 0.204 ], [ 1.356 ], [ 1.68 ], [ 1.296 ], ... ],
    	[ [ -0.408 ], [ 1.008 ], [ -1.208 ], [ 1.056 ], [ -1.896 ], [ 1.004 ], [ 1.888 ], [ 0.908 ], ... ],
    	[ [ -1.328 ], [ 1.46 ], [ 1.96 ], [ -0.668 ], [ 1.496 ], [ -1.16 ], [ -1.02 ], [ -1.648 ], ... ],
    	[ [ 0.908 ], [ 1.164 ], [ 0.504 ], [ 1.912 ], [ 1.664 ], [ -0.488 ], [ -1.948 ], [ 0.996 ], ... ],
    	[ [ -1.18 ], [ -0.88 ], [ 0.096 ], [ 0.78 ], [ -1.952 ], [ 1.524 ], [ 0.272 ], [ 1.488 ], ... ],
    	[ [ -0.34 ], [ -0.536 ], [ -1.504 ], [ 1.292 ], [ 1.556 ], [ -1.544 ], [ -1.092 ], [ 1.08 ], ... ],
    	[ [ -1.632 ], [ 1.5 ], [ -0.636 ], [ 0.988 ], [ 0.884 ], [ -0.584 ], [ -1.968 ], [ 1.208 ], ... ],
    	...
    ]
    [
    	[ [ 0.844 ], [ 1.64 ], [ 1.2 ], [ -0.832 ], [ -0.58 ], [ 1.896 ], [ 1.656 ], [ 0.716 ], ... ],
    	[ [ -0.972 ], [ -0.188 ], [ -0.496 ], [ 1.392 ], [ -1.612 ], [ 1.476 ], [ 0.1 ], [ 1.0 ], ... ],
    	[ [ 0.408 ], [ 1.56 ], [ 1.432 ], [ 1.188 ], [ -1.924 ], [ -1.444 ], [ 1.744 ], [ -1.884 ], ... ],
    	[ [ -1.456 ], [ 0.312 ], [ -0.248 ], [ -1.284 ], [ 1.752 ], [ -1.252 ], [ 1.228 ], [ -1.464 ], ... ],
    	[ [ 0.056 ], [ 1.036 ], [ 1.04 ], [ 0.88 ], [ 1.832 ], [ 0.34 ], [ 0.212 ], [ 1.428 ], ... ],
    	[ [ 0.216 ], [ -1.252 ], [ -1.836 ], [ -0.148 ], [ 0.86 ], [ -1.592 ], [ 0.648 ], [ -1.064 ], ... ],
    	[ [ 1.032 ], [ 0.556 ], [ 0.556 ], [ -1.776 ], [ -0.72 ], [ 1.488 ], [ -1.832 ], [ -1.612 ], ... ],
    	[ [ -0.98 ], [ -1.556 ], [ -1.98 ], [ -0.316 ], [ -1.404 ], [ 1.064 ], [ 0.176 ], [ -1.156 ], ... ],
    	...
    ]
    [
    	[ [ -0.272 ], [ -0.56 ], [ -0.464 ], [ -1.732 ], [ 1.732 ], [ 1.8 ], [ -0.464 ], [ 1.228 ], ... ],
    	[ [ -0.904 ], [ 0.008 ], [ -1.012 ], [ -1.02 ], [ 0.836 ], [ -0.372 ], [ -1.136 ], [ -1.336 ], ... ],
    	[ [ 1.72 ], [ -0.052 ], [ 0.24 ], [ -0.628 ], [ 1.272 ], [ -0.232 ], [ 0.312 ], [ -1.696 ], ... ],
    	[ [ -0.492 ], [ -1.356 ], [ -1.504 ], [ 1.628 ], [ -1.688 ], [ -1.084 ], [ -1.54 ], [ 1.5 ], ... ],
    	[ [ -0.9 ], [ -0.984 ], [ -0.264 ], [ 1.816 ], [ -0.476 ], [ 0.74 ], [ -1.2 ], [ 1.968 ], ... ],
    	[ [ -0.364 ], [ -0.724 ], [ -1.68 ], [ -0.496 ], [ 0.092 ], [ 0.104 ], [ 0.208 ], [ -1.596 ], ... ],
    	[ [ 0.488 ], [ -0.692 ], [ -1.596 ], [ -0.784 ], [ 1.78 ], [ -1.432 ], [ -1.064 ], [ 0.204 ], ... ],
    	[ [ 0.124 ], [ 0.208 ], [ 1.312 ], [ -1.588 ], [ 0.656 ], [ -0.928 ], [ 0.56 ], [ -0.292 ], ... ],
    	...
    ]
    [
    	[ [ -0.648 ], [ -0.676 ], [ 1.0 ], [ -1.148 ], [ -0.032 ], [ 1.8 ], [ 0.976 ], [ 1.504 ], ... ],
    	[ [ 0.628 ], [ 1.028 ], [ 0.672 ], [ -1.312 ], [ 0.08 ], [ -1.46 ], [ 0.42 ], [ 1.052 ], ... ],
    	[ [ -1.576 ], [ 1.3 ], [ -1.344 ], [ -1.804 ], [ -1.084 ], [ -1.64 ], [ 1.424 ], [ 0.192 ], ... ],
    	[ [ -0.748 ], [ 0.432 ], [ -0.752 ], [ 0.164 ], [ 0.816 ], [ -0.096 ], [ -0.34 ], [ 0.832 ], ... ],
    	[ [ -0.588 ], [ -1.816 ], [ 1.344 ], [ 0.796 ], [ 0.872 ], [ -0.244 ], [ -1.76 ], [ -1.804 ], ... ],
    	[ [ 1.324 ], [ -1.956 ], [ 1.012 ], [ -1.74 ], [ -1.06 ], [ -1.56 ], [ 0.072 ], [ 1.34 ], ... ],
    	[ [ 1.844 ], [ -1.852 ], [ -0.348 ], [ 1.14 ], [ 0.836 ], [ -1.456 ], [ -0.62 ], [ -1.94 ], ... ],
    	[ [ -1.952 ], [ -1.848 ], [ -1.092 ], [ -1.38 ], [ 1.488 ], [ -0.204 ], [ -1.156 ], [ 1.756 ], ... ],
    	...
    ]
    [
    	[ [ -1.952 ], [ -0.792 ], [ 0.148 ], [ 0.208 ], [ -1.212 ], [ -1.304 ], [ 0.152 ], [ -0.064 ], ... ],
    	[ [ -0.816 ], [ 0.304 ], [ -1.82 ], [ -1.612 ], [ -0.572 ], [ -1.552 ], [ 0.832 ], [ -0.412 ], ... ],
    	[ [ -1.772 ], [ -0.384 ], [ -1.0 ], [ -1.868 ], [ 1.164 ], [ -0.924 ], [ -1.032 ], [ -1.296 ], ... ],
    	[ [ -1.916 ], [ 0.316 ], [ 0.788 ], [ 0.828 ], [ 1.788 ], [ -0.944 ], [ -1.484 ], [ 0.324 ], ... ],
    	[ [ -1.0 ], [ 0.384 ], [ 0.888 ], [ -1.452 ], [ 0.464 ], [ -1.208 ], [ -1.212 ], [ -1.924 ], ... ],
    	[ [ 1.032 ], [ -1.556 ], [ 1.156 ], [ 0.684 ], [ -1.096 ], [ -0.864 ], [ 0.4 ], [ -1.8 ], ... ],
    	[ [ 1.616 ], [ 0.312 ], [ -1.52 ], [ -0.788 ], [ -1.836 ], [ -0.644 ], [ -0.96 ], [ 1.24 ], ... ],
    	[ [ -1.204 ], [ -1.472 ], [ 1.248 ], [ 1.252 ], [ 1.664 ], [ -1.548 ], [ 0.892 ], [ 0.608 ], ... ],
    	...
    ]

Gradient Descent

First, we train using the basic gradient descent method with weak line search conditions.
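
The WOLFE (weak) and END entries in the log below come from the Armijo-Wolfe line search, which probes increasing step sizes and judges each one by a sufficient-decrease (Armijo) test and a curvature (Wolfe) test on the directional derivative. The sketch below spells out the two textbook tests using the numbers from the first probe in the log; the constants c1 and c2 are illustrative choices, not values read from ArmijoWolfeSearch.

    public class WolfeConditionsSketch {
      // Illustrative constants; the actual ArmijoWolfeSearch parameters may differ.
      static final double C1 = 1e-4; // sufficient-decrease constant
      static final double C2 = 0.9;  // curvature constant, with C1 < C2 < 1

      /** Armijo (sufficient decrease): f(t) <= f(0) + c1 * t * f'(0). */
      static boolean sufficientDecrease(double f0, double df0, double t, double ft) {
        return ft <= f0 + C1 * t * df0;
      }

      /** Weak Wolfe (curvature): f'(t) >= c2 * f'(0), i.e. the slope has flattened enough. */
      static boolean curvature(double df0, double dft) {
        return dft >= C2 * df0;
      }

      public static void main(String[] args) {
        // Numbers from the first probe in the log: th(0)=13.3269..., dx=-3.7019e-5,
        // then th(2.1544...)=13.32682..., dx=-3.70190579e-5.
        double f0 = 13.32690072540889, df0 = -3.701916868169126e-5;
        double t = 2.154434690031884, ft = 13.326820970147011, dft = -3.7019057910328624e-5;
        System.out.println(sufficientDecrease(f0, df0, t, ft)); // true: the step decreased the objective enough
        System.out.println(curvature(df0, dft)); // false for C2=0.9: the slope is still steep, so a larger step is worth probing
      }
    }

The run itself pairs this line search with a plain GradientDescent orientation and a 30-second timeout: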

TrainingTester.java:480 executed in 33.93 seconds (6.264 gc):

    IterativeTrainer iterativeTrainer = new IterativeTrainer(trainable.addRef());
    try {
      iterativeTrainer.setLineSearchFactory(label -> new ArmijoWolfeSearch());
      iterativeTrainer.setOrientation(new GradientDescent());
      iterativeTrainer.setMonitor(TrainingTester.getMonitor(history));
      iterativeTrainer.setTimeout(30, TimeUnit.SECONDS);
      iterativeTrainer.setMaxIterations(250);
      iterativeTrainer.setTerminateThreshold(0);
      return iterativeTrainer.run();
    } finally {
      iterativeTrainer.freeRef();
    }
Logging
Reset training subject: 6739171688845
Reset training subject: 6740133986688
Constructing line search parameters: GD
th(0)=13.32690072540889;dx=-3.701916868169126E-5
New Minimum: 13.32690072540889 > 13.326820970147011
WOLFE (weak): th(2.154434690031884)=13.326820970147011; dx=-3.7019057910328624E-5 evalInputDelta=7.975526187919968E-5
New Minimum: 13.326820970147011 > 13.32674121512378
WOLFE (weak): th(4.308869380063768)=13.32674121512378; dx=-3.701894713896577E-5 evalInputDelta=1.5951028511018706E-4
New Minimum: 13.32674121512378 > 13.326422197417358
WOLFE (weak): th(12.926608140191302)=13.326422197417358; dx=-3.70185040535147E-5 evalInputDelta=4.785279915324736E-4
New Minimum: 13.326422197417358 > 13.324986664991098
WOLFE (weak): th(51.70643256076521)=13.324986664991098; dx=-3.701651016898446E-5 evalInputDelta=0.00191406041779274
New Minimum: 13.324986664991098 > 13.317331797942012
WOLFE (weak): th(258.53216280382605)=13.317331797942012; dx=-3.700587611815726E-5 evalInputDelta=0.009568927466878208
New Minimum: 13.317331797942012 > 13.269538708935618
WOLFE (weak): th(1551.1929768229563)=13.269538708935618; dx=-3.6939413300487435E-5 evalInputDelta=0.05736201647327199
New Minimum: 13.269538708935618 > 12.927964645826933
WOLFE (weak): th(10858.350837760694)=12.927964645826933; dx=-3.646088101326358E-5 evalInputDelta=0.39893607958195787
New Minimum: 12.927964645826933 > 10.305150423174425
END: th(86866.80670208555)=10.305150423174425; dx=-3.255286733426856E-5 evalInputDelta=3.021750302234466
Fitness changed from 13.32690072540889 to 10.305150423174425
Iteration 1 complete. Error: 10.305150423174425 Total: 21.7273; Orientation: 1.0017; Line Search: 18.1313
th(0)=10.305150423174425;dx=-2.8625417842151176E-5
New Minimum: 10.305150423174425 > 5.644183675057867
END: th(187148.86177126726)=5.644183675057867; dx=-2.118484233258794E-5 evalInputDelta=4.660966748116557
Fitness changed from 10.305150423174425 to 5.644183675057867
Iteration 2 complete. Error: 5.644183675057867 Total: 6.1420; Orientation: 1.2502; Line Search: 4.2513
th(0)=5.644183675057867;dx=-1.567828798627184E-5
New Minimum: 5.644183675057867 > 1.0927139594912025
END: th(403200.00000000006)=1.0927139594912025; dx=-6.898446713959615E-6 evalInputDelta=4.551469715566665
Fitness changed from 5.644183675057867 to 1.0927139594912025
Iteration 3 complete. Error: 1.0927139594912025 Total: 6.0533; Orientation: 1.2668; Line Search: 4.1336
Final threshold in iteration 3: 1.0927139594912025 (> 0.0) after 33.924s (< 30.000s)

Returns

    1.0927139594912025

This training run resulted in the following configuration:

TrainingTester.java:610 executed in 0.00 seconds (0.000 gc):

    RefList<double[]> state = network.state();
    assert state != null;
    String description = state.stream().map(RefArrays::toString).reduce((a, b) -> a + "\n" + b)
        .orElse("");
    state.freeRef();
    return description;

Returns

    

And regressed input:

TrainingTester.java:622 executed in 0.00 seconds (0.000 gc):

    return RefArrays.stream(RefUtil.addRef(data)).flatMap(x -> {
      return RefArrays.stream(x);
    }).limit(1).map(x -> {
      String temp_18_0015 = x.prettyPrint();
      x.freeRef();
      return temp_18_0015;
    }).reduce((a, b) -> a + "\n" + b).orElse("");

Returns

    [
    	[ [ -0.1338680288120846 ], [ -0.40924983865956194 ], [ -1.1921580864724286 ], [ -1.5243740241920218 ], [ 1.0633768449352756 ], [ 0.2702166559106374 ], [ -0.031048691634728218 ], [ -1.06262243687368 ], ... ],
    	[ [ -0.6609802339888543 ], [ -1.0869209671679703 ], [ 0.5007219539078999 ], [ 0.5186365509941337 ], [ -1.2237036034704731 ], [ -0.9062971147205711 ], [ -1.8698602743691826 ], [ -2.231051522782167 ], ... ],
    	[ [ 0.8724057809630427 ], [ 1.1458510835441926 ], [ 0.6228115827344545 ], [ 0.9569724795459523 ], [ 1.5870762224929604 ], [ -0.09996258093410249 ], [ 1.3659273164413375 ], [ -0.6224636530185763 ], ... ],
    	[ [ -0.8953514478418068 ], [ -1.2933006328464998 ], [ 0.8667346524546344 ], [ 1.759975300289206 ], [ -1.9883592126888932 ], [ 1.508960499190262 ], [ 1.6145052979972623 ], [ 1.4003556841587796 ], ... ],
    	[ [ -0.12448271124286274 ], [ 0.8920282282409073 ], [ 1.3688101671607353 ], [ -0.7102914524256942 ], [ 1.7932611216325773 ], [ 2.3544735412262416 ], [ 0.7399272956329684 ], [ 1.1645610362880332 ], ... ],
    	[ [ -1.4242893394693001 ], [ -0.1405568103752444 ], [ -0.1882124091894799 ], [ 1.5362427711951505 ], [ -0.10853141328177551 ], [ -1.2031362075048886 ], [ -0.9247290109681268 ], [ -0.9823613256452874 ], ... ],
    	[ [ 0.7659287112066875 ], [ 1.4783613256452874 ], [ 0.032091741782948824 ], [ 0.06744671815486898 ], [ 0.02907620168459113 ], [ 1.7642992276769651 ], [ 0.29361820055670707 ], [ -1.3762921706167384 ], ... ],
    	[ [ 1.4136810167160734 ], [ 2.2229915377702385 ], [ -1.6319710743764178 ], [ -0.10141424319280423 ], [ -1.1967261798206885 ], [ -0.6093570893283144 ], [ 0.3829647042946814 ], [ -1.0635278743474774 ], ... ],
    	...
    ]

To produce the following output:

TrainingTester.java:633 executed in 0.07 seconds (0.000 gc):

    Result[] array = ConstantResult.batchResultArray(pop(RefUtil.addRef(data)));
    @Nullable
    Result eval = layer.eval(array);
    assert eval != null;
    TensorList tensorList = Result.getData(eval);
    String temp_18_0016 = tensorList.stream().limit(1).map(x -> {
      String temp_18_0017 = x.prettyPrint();
      x.freeRef();
      return temp_18_0017;
    }).reduce((a, b) -> a + "\n" + b).orElse("");
    tensorList.freeRef();
    return temp_18_0016;

Returns

    [
    	[ [ 3.6626598559395775 ], [ -1.8022491932978097 ], [ -2.780790432362143 ], [ -2.0258701209601098 ], [ 0.23288422467637848 ], [ 1.4790832795531876 ], [ 1.712756541826359 ], [ 0.08688781563159953 ], ... ],
    	[ [ 2.827098830055729 ], [ -2.466604835839852 ], [ 0.8076097695394995 ], [ 2.0371827549706687 ], [ -0.30651801735236595 ], [ -3.9354855736028544 ], [ -1.8173013718459128 ], [ -4.339257613910833 ], ... ],
    	[ [ 1.770028904815213 ], [ 3.2492554177209625 ], [ -1.9299420863277281 ], [ 0.5168623977297617 ], [ 0.5593811124648025 ], [ 0.9881870953294882 ], [ 3.8416365822066876 ], [ -2.7843182650928817 ], ... ],
    	[ [ -2.6887572392090338 ], [ -0.2945031642324991 ], [ -0.9823267377268285 ], [ 1.4958765014460302 ], [ 1.0662039365555342 ], [ 3.11680249595131 ], [ -0.9434735100136882 ], [ 1.5657784207938987 ], ... ],
    	[ [ -2.1744135562143136 ], [ 3.428141141204536 ], [ -0.8079491641963235 ], [ -1.467457262128471 ], [ -0.0056943918371137325 ], [ 1.8763677061312085 ], [ -1.4763635218351578 ], [ -4.529194818559835 ], ... ],
    	[ [ 1.2065533026534996 ], [ -0.8347840518762222 ], [ 2.422937954052601 ], [ -0.6747861440242473 ], [ -3.6186570664088773 ], [ -2.7836810375244427 ], [ -2.8156450548406338 ], [ 0.5721933717735628 ], ... ],
    	[ [ -1.2823564439665622 ], [ -2.360193371773563 ], [ 0.9444587089147438 ], [ 1.153233590774345 ], [ -2.886618991577044 ], [ 0.8814961383848261 ], [ -2.327908997216465 ], [ -0.9934608530836917 ], ... ],
    	[ [ 2.7124050835803675 ], [ 2.478957688851193 ], [ -1.6678553718820888 ], [ 0.15692878403597887 ], [ 0.5283691008965581 ], [ -1.6467854466415728 ], [ -0.8891764785265935 ], [ -2.7456393717373873 ], ... ],
    	...
    ]

Conjugate Gradient Descent

Next, we use a conjugate gradient descent method, which converges fastest on objectives with a purely linear gradient (i.e., quadratic functions).
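
The QuadraticSearch line search seen in the log below brackets the minimum along the search direction by evaluating F at growing step sizes and fitting a parabola through the samples. The sketch below shows one such interpolation step, using the standard minimizer of the parabola determined by f(0), f'(0) and f(t); the values are taken from the first iteration of the log, and only the textbook formula is shown, not the QuadraticSearch implementation.

    public class QuadraticStepSketch {
      /**
       * Minimizer of the parabola that passes through (0, f0) with slope df0 and through (t, ft):
       * s* = -df0 * t^2 / (2 * (ft - f0 - df0 * t)).
       */
      static double quadraticMinimizer(double f0, double df0, double t, double ft) {
        double a = (ft - f0 - df0 * t) / (t * t); // fitted second-order coefficient
        if (a <= 0) {
          return Double.POSITIVE_INFINITY;        // no interior minimum: keep expanding the bracket
        }
        return -df0 / (2 * a);
      }

      public static void main(String[] args) {
        // Values from the log: F(0)=13.3269..., derivative -3.7019e-5, F(162841.36)=7.9803...
        double f0 = 13.32690072540889, df0 = -3.701916868169126e-5;
        double t = 162841.3597910449, ft = 7.980349455346278;
        // Prints roughly 720000; since this objective is exactly quadratic in the input,
        // the fit lands essentially where the log's search converges (th ~ 719999.99...).
        System.out.println(quadraticMinimizer(f0, df0, t, ft));
      }
    }

The run below pairs QuadraticSearch with a GradientDescent orientation: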

TrainingTester.java:452 executed in 42.80 seconds (3.011 gc):

    IterativeTrainer iterativeTrainer = new IterativeTrainer(trainable.addRef());
    try {
      iterativeTrainer.setLineSearchFactory(label -> new QuadraticSearch());
      iterativeTrainer.setOrientation(new GradientDescent());
      iterativeTrainer.setMonitor(TrainingTester.getMonitor(history));
      iterativeTrainer.setTimeout(30, TimeUnit.SECONDS);
      iterativeTrainer.setMaxIterations(250);
      iterativeTrainer.setTerminateThreshold(0);
      return iterativeTrainer.run();
    } finally {
      iterativeTrainer.freeRef();
    }
Logging
Reset training subject: 6773359124593
Reset training subject: 6773997223211
Constructing line search parameters: GD
F(0.0) = LineSearchPoint{point=PointSample{avg=13.32690072540889}, derivative=-3.701916868169126E-5}
New Minimum: 13.32690072540889 > 13.326900725408885
F(1.0E-10) = LineSearchPoint{point=PointSample{avg=13.326900725408885}, derivative=-3.7019168681691254E-5}, evalInputDelta = -5.329070518200751E-15
New Minimum: 13.326900725408885 > 13.326900725408866
F(7.000000000000001E-10) = LineSearchPoint{point=PointSample{avg=13.326900725408866}, derivative=-3.7019168681691234E-5}, evalInputDelta = -2.4868995751603507E-14
New Minimum: 13.326900725408866 > 13.32690072540871
F(4.900000000000001E-9) = LineSearchPoint{point=PointSample{avg=13.32690072540871}, derivative=-3.701916868169104E-5}, evalInputDelta = -1.8118839761882555E-13
New Minimum: 13.32690072540871 > 13.32690072540762
F(3.430000000000001E-8) = LineSearchPoint{point=PointSample{avg=13.32690072540762}, derivative=-3.701916868168954E-5}, evalInputDelta = -1.270095140171179E-12
New Minimum: 13.32690072540762 > 13.3269007254
F(2.4010000000000004E-7) = LineSearchPoint{point=PointSample{avg=13.3269007254}, derivative=-3.7019168681678975E-5}, evalInputDelta = -8.890665981198254E-12
New Minimum: 13.3269007254 > 13.326900725346672
F(1.6807000000000003E-6) = LineSearchPoint{point=PointSample{avg=13.326900725346672}, derivative=-3.701916868160495E-5}, evalInputDelta = -6.221867465683317E-11
New Minimum: 13.326900725346672 > 13.326900724973365
F(1.1764900000000001E-5) = LineSearchPoint{point=PointSample{avg=13.326900724973365}, derivative=-3.70191686810864E-5}, evalInputDelta = -4.35525393527314E-10
New Minimum: 13.326900724973365 > 13.326900722360202
F(8.235430000000001E-5) = LineSearchPoint{point=PointSample{avg=13.326900722360202}, derivative=-3.7019168677457106E-5}, evalInputDelta = -3.0486884128322345E-9
New Minimum: 13.326900722360202 > 13.326900704068075
F(5.764801000000001E-4) = LineSearchPoint{point=PointSample{avg=13.326900704068075}, derivative=-3.7019168652051315E-5}, evalInputDelta = -2.1340815337111962E-8
New Minimum: 13.326900704068075 > 13.326900576023192
F(0.004035360700000001) = LineSearchPoint{point=PointSample{avg=13.326900576023192}, derivative=-3.701916847421124E-5}, evalInputDelta = -1.4938569847799954E-7
New Minimum: 13.326900576023192 > 13.32689967970902
F(0.028247524900000005) = LineSearchPoint{point=PointSample{avg=13.32689967970902}, derivative=-3.701916722933045E-5}, evalInputDelta = -1.0456998698060715E-6
New Minimum: 13.32689967970902 > 13.32689340551067
F(0.19773267430000002) = LineSearchPoint{point=PointSample{avg=13.32689340551067}, derivative=-3.701915851516465E-5}, evalInputDelta = -7.319898220004006E-6
New Minimum: 13.32689340551067 > 13.326849486163573
F(1.3841287201) = LineSearchPoint{point=PointSample{avg=13.326849486163573}, derivative=-3.701909751600439E-5}, evalInputDelta = -5.123924531780233E-5
New Minimum: 13.326849486163573 > 13.326542052760235
F(9.688901040700001) = LineSearchPoint{point=PointSample{avg=13.326542052760235}, derivative=-3.7018670521883016E-5}, evalInputDelta = -3.5867264865530046E-4
New Minimum: 13.326542052760235 > 13.324390118227345
F(67.8223072849) = LineSearchPoint{point=PointSample{avg=13.324390118227345}, derivative=-3.7015681563033364E-5}, evalInputDelta = -0.002510607181545055
New Minimum: 13.324390118227345 > 13.309331441731171
F(474.7561509943) = LineSearchPoint{point=PointSample{avg=13.309331441731171}, derivative=-3.699475885108536E-5}, evalInputDelta = -0.017569283677719483
New Minimum: 13.309331441731171 > 13.204159102726601
F(3323.2930569601003) = LineSearchPoint{point=PointSample{avg=13.204159102726601}, derivative=-3.684829986744935E-5}, evalInputDelta = -0.12274162268228928
New Minimum: 13.204159102726601 > 12.479634156657323
F(23263.0513987207) = LineSearchPoint{point=PointSample{avg=12.479634156657323}, derivative=-3.5823086981996786E-5}, evalInputDelta = -0.8472665687515679
New Minimum: 12.479634156657323 > 7.980349455346278
F(162841.3597910449) = LineSearchPoint{point=PointSample{avg=7.980349455346278}, derivative=-2.8646596783829297E-5}, evalInputDelta = -5.346551270062612
New Minimum: 7.980349455346278 > 4.532462683689721
F(1139889.5185373144) = LineSearchPoint{point=PointSample{avg=4.532462683689721}, derivative=2.1588834603343048E-5}, evalInputDelta = -8.79443804171917
4.532462683689721 <= 13.32690072540889
New Minimum: 4.532462683689721 > 4.1755511922277604E-30
F(719999.9999999995) = LineSearchPoint{point=PointSample{avg=4.1755511922277604E-30}, derivative=-2.015956002215121E-20}, evalInputDelta = -13.32690072540889
Left bracket at 719999.9999999995
Converged to left
Fitness changed from 13.32690072540889 to 4.1755511922277604E-30
Iteration 1 complete. Error: 4.1755511922277604E-30 Total: 42.8011; Orientation: 1.3722; Line Search: 39.5015
Final threshold in iteration 1: 4.1755511922277604E-30 (> 0.0) after 42.801s (< 30.000s)

Returns

    4.1755511922277604E-30

Training Converged

Limited-Memory BFGS

Next, we apply the same optimization using L-BFGS, which is nearly ideal for purely second-order (quadratic) objectives.
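
L-BFGS builds each search direction from a short history of recent parameter steps and gradient changes; the "Adding measurement ... to history" and "LBFGS Accumulation History: N points" lines in the log track that history, and directions the trainer judges unreliable are rejected ("Orientation rejected") with history entries popped. The sketch below is the textbook two-loop recursion that turns such a history into a direction, written in plain Java; it is not the LBFGS class used by the trainer, and the small example in main is hypothetical.

    import java.util.Arrays;
    import java.util.List;

    public class TwoLoopRecursionSketch {
      /**
       * Textbook L-BFGS two-loop recursion.
       * s.get(i) = x_{i+1} - x_i and y.get(i) = grad_{i+1} - grad_i, ordered oldest to newest;
       * g is the current gradient. Returns d = -H * g, where H approximates the inverse Hessian.
       */
      static double[] direction(List<double[]> s, List<double[]> y, double[] g) {
        int m = s.size();
        double[] q = g.clone();
        double[] alpha = new double[m];
        for (int i = m - 1; i >= 0; i--) {                  // first loop: newest to oldest
          double rho = 1.0 / dot(y.get(i), s.get(i));
          alpha[i] = rho * dot(s.get(i), q);
          axpy(-alpha[i], y.get(i), q);                     // q -= alpha[i] * y[i]
        }
        double gamma = dot(s.get(m - 1), y.get(m - 1)) / dot(y.get(m - 1), y.get(m - 1));
        for (int j = 0; j < q.length; j++) q[j] *= gamma;   // scale by an initial inverse-Hessian guess
        for (int i = 0; i < m; i++) {                       // second loop: oldest to newest
          double rho = 1.0 / dot(y.get(i), s.get(i));
          double beta = rho * dot(y.get(i), q);
          axpy(alpha[i] - beta, s.get(i), q);               // q += (alpha[i] - beta) * s[i]
        }
        for (int j = 0; j < q.length; j++) q[j] = -q[j];    // negate to obtain a descent direction
        return q;
      }

      static double dot(double[] a, double[] b) {
        double sum = 0;
        for (int i = 0; i < a.length; i++) sum += a[i] * b[i];
        return sum;
      }

      static void axpy(double a, double[] x, double[] acc) {
        for (int i = 0; i < acc.length; i++) acc[i] += a * x[i];
      }

      public static void main(String[] args) {
        // One (s, y) pair from the 2-D quadratic f(x) = 0.5 * (x1^2 + 10 * x2^2), gradient (x1, 10*x2).
        List<double[]> s = List.of(new double[]{1.0, 0.1});
        List<double[]> y = List.of(new double[]{1.0, 1.0});
        double[] g = {2.0, 5.0};                            // gradient at the current point (2.0, 0.5)
        System.out.println(Arrays.toString(direction(s, y, g))); // a descent direction (negative dot product with g)
      }
    }

The run below uses the LBFGS orientation with an ArmijoWolfeSearch line search and up to 100 iterations per sample: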

TrainingTester.java:509 executed in 397.21 seconds (12.106 gc):

    IterativeTrainer iterativeTrainer = new IterativeTrainer(trainable.addRef());
    try {
      iterativeTrainer.setLineSearchFactory(label -> new ArmijoWolfeSearch());
      iterativeTrainer.setOrientation(new LBFGS());
      iterativeTrainer.setMonitor(TrainingTester.getMonitor(history));
      iterativeTrainer.setTimeout(30, TimeUnit.SECONDS);
      iterativeTrainer.setIterationsPerSample(100);
      iterativeTrainer.setMaxIterations(250);
      iterativeTrainer.setTerminateThreshold(0);
      return iterativeTrainer.run();
    } finally {
      iterativeTrainer.freeRef();
    }
Logging
Reset training subject: 6816335020170
Reset training subject: 6817006436990
Adding measurement 7179a330 to history. Total: 0
LBFGS Accumulation History: 1 points
Constructing line search parameters: GD
Non-optimal measurement 13.32690072540889 < 13.32690072540889. Total: 1
th(0)=13.32690072540889;dx=-3.701916868169126E-5
Adding measurement 7f3957be to history. Total: 1
New Minimum: 13.32690072540889 > 13.326820970147011
WOLFE (weak): th(2.154434690031884)=13.326820970147011; dx=-3.7019057910328624E-5 evalInputDelta=7.975526187919968E-5
Adding measurement 70c05b60 to history. Total: 2
New Minimum: 13.326820970147011 > 13.32674121512378
WOLFE (weak): th(4.308869380063768)=13.32674121512378; dx=-3.701894713896577E-5 evalInputDelta=1.5951028511018706E-4
Adding measurement 2cf84ce5 to history. Total: 3
New Minimum: 13.32674121512378 > 13.326422197417358
WOLFE (weak): th(12.926608140191302)=13.326422197417358; dx=-3.70185040535147E-5 evalInputDelta=4.785279915324736E-4
Adding measurement 3bef3150 to history. Total: 4
New Minimum: 13.326422197417358 > 13.324986664991098
WOLFE (weak): th(51.70643256076521)=13.324986664991098; dx=-3.701651016898446E-5 evalInputDelta=0.00191406041779274
Adding measurement 12071a8e to history. Total: 5
New Minimum: 13.324986664991098 > 13.317331797942012
WOLFE (weak): th(258.53216280382605)=13.317331797942012; dx=-3.700587611815726E-5 evalInputDelta=0.009568927466878208
Adding measurement 47ef26cb to history. Total: 6
New Minimum: 13.317331797942012 > 13.269538708935618
WOLFE (weak): th(1551.1929768229563)=13.269538708935618; dx=-3.6939413300487435E-5 evalInputDelta=0.05736201647327199
Adding measurement 1f6b24a6 to history. Total: 7
New Minimum: 13.269538708935618 > 12.927964645826933
WOLFE (weak): th(10858.350837760694)=12.927964645826933; dx=-3.646088101326358E-5 evalInputDelta=0.39893607958195787
Adding measurement 2327ac3a to history. Total: 8
New Minimum: 12.927964645826933 > 10.305150423174425
END: th(86866.80670208555)=10.305150423174425; dx=-3.255286733426856E-5 evalInputDelta=3.021750302234466
Fitness changed from 13.32690072540889 to 10.305150423174425
Iteration 1 complete. Error: 10.305150423174425 Total: 28.1426; Orientation: 1.5606; Line Search: 24.6171
Non-optimal measurement 10.305150423174425 < 10.305150423174425. Total: 9
Rejected: LBFGS Orientation magnitude: 3.852e+03, gradient 5.350e-03, dot -1.000; [5be57691-7b67-482f-8ae5-eb46afe1eedc = 1.000/1.000e+00, 04274889-4931-4b0a-a9b3-30f2734ea612 = 1.000/1.000e+00, cff1b004-aec5-4a40-91f3-98dcf81435bc = 1.000/1.000e+00, 7bbebe20-7f56-42d2-874a-af60ebc950f8 = 1.000/1.000e+00, e480e1bf-35cc-4af8-b20c-9c8f0976453d = 1.000/1.000e+00, 0de914e1-680e-4d96-ae93-40cbda60cbaa = 1.000/1.000e+00, c2ba0e52-7647-49aa-9902-bddfb1445315 = 1.000/1.000e+00, 5fcadd41-4323-4b61-9204-f3fa53bde66e = 1.000/1.000e+00, da7faaed-33b7-4bb0-9fa2-b495dae177c8 = 1.000/1.000e+00, 1a6a7296-d3ed-404e-a005-0505ef91d7f2 = 1.000/1.000e+00, 6328b8d9-3bc1-43df-9cb4-c2f4ca2c95e8 = 1.000/1.000e+00, 5322f35b-1752-4a96-8e48-359016dddc0f = 1.000/1.000e+00, 2e52081a-6fa9-4cd5-965f-c29d9cdbf27b = 1.000/1.000e+00, fd28309c-2dfd-47a0-8001-374abd546372 = 1.000/1.000e+00, 7c94962e-556c-41e1-ad96-871410740b9c = 1.000/1.000e+00, eefa8b0e-4948-48b2-bc04-3f0b9512bc5e = 1.000/1.000e+00, 89750bf3-f32e-4d88-9ae0-3d770fdce7b7 = 1.000/1.000e+00, c04b6196-f3f5-4e40-b1e1-de832e107eac = 1.000/1.000e+00, c4f78eaa-96b3-49fb-af43-94e4d45592f9 = 1.000/1.000e+00, 2611d888-5321-44c6-abd6-b428e27c6591 = 1.000/1.000e+00, 9caa5fa8-0b30-4d34-9b97-33bda86af032 = 1.000/1.000e+00, 9dda64d6-b87a-4796-b58f-2a3917d802e3 = 1.000/1.000e+00, b84af4db-52c6-45f4-adb6-8d1b41205a00 = 1.000/1.000e+00, 50d9471d-21d1-492d-a93d-1e0d7b6470a1 = 1.000/1.000e+00, 5d1d5d94-9fe5-4f82-bea1-71808e435a13 = 1.000/1.000e+00]
Orientation rejected. Popping history element from 10.305150423174425, 12.927964645826933, 13.269538708935618, 13.317331797942012, 13.324986664991098, 13.326422197417358, 13.32674121512378, 13.326820970147011, 13.32690072540889
Rejected: LBFGS Orientation magnitude: 3.852e+03, gradient 5.350e-03, dot -1.000; [7bbebe20-7f56-42d2-874a-af60ebc950f8 = 1.000/1.000e+00, 2611d888-5321-44c6-abd6-b428e27c6591 = 1.000/1.000e+00, 1a6a7296-d3ed-404e-a005-0505ef91d7f2 = 1.000/1.000e+00, 2e52081a-6fa9-4cd5-965f-c29d9cdbf27b = 1.000/1.000e+00, da7faaed-33b7-4bb0-9fa2-b495dae177c8 = 1.000/1.000e+00, b84af4db-52c6-45f4-adb6-8d1b41205a00 = 1.000/1.000e+00, 0de914e1-680e-4d96-ae93-40cbda60cbaa = 1.000/1.000e+00, eefa8b0e-4948-48b2-bc04-3f0b9512bc5e = 1.000/1.000e+00, 5322f35b-1752-4a96-8e48-359016dddc0f = 1.000/1.000e+00, 04274889-4931-4b0a-a9b3-30f2734ea612 = 1.000/1.000e+00, 6328b8d9-3bc1-43df-9cb4-c2f4ca2c95e8 = 1.000/1.000e+00, 50d9471d-21d1-492d-a93d-1e0d7b6470a1 = 1.000/1.000e+00, c2ba0e52-7647-49aa-9902-bddfb1445315 = 1.000/1.000e+00, 7c94962e-556c-41e1-ad96-871410740b9c = 1.000/1.000e+00, e480e1bf-35cc-4af8-b20c-9c8f0976453d = 1.000/1.000e+00, 9caa5fa8-0b30-4d34-9b97-33bda86af032 = 1.000/1.000e+00, 5be57691-7b67-482f-8ae5-eb46afe1eedc = 1.000/1.000e+00, cff1b004-aec5-4a40-91f3-98dcf81435bc = 1.000/1.000e+00, 5fcadd41-4323-4b61-9204-f3fa53bde66e = 1.000/1.000e+00, 89750bf3-f32e-4d88-9ae0-3d770fdce7b7 = 1.000/1.000e+00, c4f78eaa-96b3-49fb-af43-94e4d45592f9 = 1.000/1.000e+00, c04b6196-f3f5-4e40-b1e1-de832e107eac = 1.000/1.000e+00, fd28309c-2dfd-47a0-8001-374abd546372 = 1.000/1.000e+00, 5d1d5d94-9fe5-4f82-bea1-71808e435a13 = 1.000/1.000e+00, 9dda64d6-b87a-4796-b58f-2a3917d802e3 = 1.000/1.000e+00]
Orientation rejected. Popping history element from 10.305150423174425, 12.927964645826933, 13.269538708935618, 13.317331797942012, 13.324986664991098, 13.326422197417358, 13.32674121512378, 13.326820970147011
Rejected: LBFGS Orientation magnitude: 3.852e+03, gradient 5.350e-03, dot -1.000; [50d9471d-21d1-492d-a93d-1e0d7b6470a1 = 1.000/1.000e+00, 5322f35b-1752-4a96-8e48-359016dddc0f = 1.000/1.000e+00, fd28309c-2dfd-47a0-8001-374abd546372 = 1.000/1.000e+00, eefa8b0e-4948-48b2-bc04-3f0b9512bc5e = 1.000/1.000e+00, 04274889-4931-4b0a-a9b3-30f2734ea612 = 1.000/1.000e+00, 9dda64d6-b87a-4796-b58f-2a3917d802e3 = 1.000/1.000e+00, 7bbebe20-7f56-42d2-874a-af60ebc950f8 = 1.000/1.000e+00, e480e1bf-35cc-4af8-b20c-9c8f0976453d = 1.000/1.000e+00, c4f78eaa-96b3-49fb-af43-94e4d45592f9 = 1.000/1.000e+00, 9caa5fa8-0b30-4d34-9b97-33bda86af032 = 1.000/1.000e+00, 0de914e1-680e-4d96-ae93-40cbda60cbaa = 1.000/1.000e+00, 5be57691-7b67-482f-8ae5-eb46afe1eedc = 1.000/1.000e+00, 2611d888-5321-44c6-abd6-b428e27c6591 = 1.000/1.000e+00, c04b6196-f3f5-4e40-b1e1-de832e107eac = 1.000/1.000e+00, 89750bf3-f32e-4d88-9ae0-3d770fdce7b7 = 1.000/1.000e+00, 2e52081a-6fa9-4cd5-965f-c29d9cdbf27b = 1.000/1.000e+00, b84af4db-52c6-45f4-adb6-8d1b41205a00 = 1.000/1.000e+00, cff1b004-aec5-4a40-91f3-98dcf81435bc = 1.000/1.000e+00, 1a6a7296-d3ed-404e-a005-0505ef91d7f2 = 1.000/1.000e+00, 7c94962e-556c-41e1-ad96-871410740b9c = 1.000/1.000e+00, 5d1d5d94-9fe5-4f82-bea1-71808e435a13 = 1.000/1.000e+00, c2ba0e52-7647-49aa-9902-bddfb1445315 = 1.000/1.000e+00, 6328b8d9-3bc1-43df-9cb4-c2f4ca2c95e8 = 1.000/1.000e+00, da7faaed-33b7-4bb0-9fa2-b495dae177c8 = 1.000/1.000e+00, 5fcadd41-4323-4b61-9204-f3fa53bde66e = 1.000/1.000e+00]
Orientation rejected. Popping history element from 10.305150423174425, 12.927964645826933, 13.269538708935618, 13.317331797942012, 13.324986664991098, 13.326422197417358, 13.32674121512378
Rejected: LBFGS Orientation magnitude: 3.852e+03, gradient 5.350e-03, dot -1.000; [0de914e1-680e-4d96-ae93-40cbda60cbaa = 1.000/1.000e+00, 9dda64d6-b87a-4796-b58f-2a3917d802e3 = 1.000/1.000e+00, eefa8b0e-4948-48b2-bc04-3f0b9512bc5e = 1.000/1.000e+00, 7bbebe20-7f56-42d2-874a-af60ebc950f8 = 1.000/1.000e+00, 6328b8d9-3bc1-43df-9cb4-c2f4ca2c95e8 = 1.000/1.000e+00, 1a6a7296-d3ed-404e-a005-0505ef91d7f2 = 1.000/1.000e+00, 2e52081a-6fa9-4cd5-965f-c29d9cdbf27b = 1.000/1.000e+00, c2ba0e52-7647-49aa-9902-bddfb1445315 = 1.000/1.000e+00, 5d1d5d94-9fe5-4f82-bea1-71808e435a13 = 1.000/1.000e+00, c4f78eaa-96b3-49fb-af43-94e4d45592f9 = 1.000/1.000e+00, 5fcadd41-4323-4b61-9204-f3fa53bde66e = 1.000/1.000e+00, b84af4db-52c6-45f4-adb6-8d1b41205a00 = 1.000/1.000e+00, 9caa5fa8-0b30-4d34-9b97-33bda86af032 = 1.000/1.000e+00, fd28309c-2dfd-47a0-8001-374abd546372 = 1.000/1.000e+00, 7c94962e-556c-41e1-ad96-871410740b9c = 1.000/1.000e+00, e480e1bf-35cc-4af8-b20c-9c8f0976453d = 1.000/1.000e+00, 2611d888-5321-44c6-abd6-b428e27c6591 = 1.000/1.000e+00, c04b6196-f3f5-4e40-b1e1-de832e107eac = 1.000/1.000e+00, da7faaed-33b7-4bb0-9fa2-b495dae177c8 = 1.000/1.000e+00, 50d9471d-21d1-492d-a93d-1e0d7b6470a1 = 1.000/1.000e+00, 5be57691-7b67-482f-8ae5-eb46afe1eedc = 1.000/1.000e+00, 89750bf3-f32e-4d88-9ae0-3d770fdce7b7 = 1.000/1.000e+00, 5322f35b-1752-4a96-8e48-359016dddc0f = 1.000/1.000e+00, cff1b004-aec5-4a40-91f3-98dcf81435bc = 1.000/1.000e+00, 04274889-4931-4b0a-a9b3-30f2734ea612 = 1.000/1.000e+00]
Orientation rejected. Popping history element from 10.305150423174425, 12.927964645826933, 13.269538708935618, 13.317331797942012, 13.324986664991098, 13.326422197417358
Rejected: LBFGS Orientation magnitude: 3.852e+03, gradient 5.350e-03, dot -1.000; [04274889-4931-4b0a-a9b3-30f2734ea612 = 1.000/1.000e+00, da7faaed-33b7-4bb0-9fa2-b495dae177c8 = 1.000/1.000e+00, c4f78eaa-96b3-49fb-af43-94e4d45592f9 = 1.000/1.000e+00, 0de914e1-680e-4d96-ae93-40cbda60cbaa = 1.000/1.000e+00, 6328b8d9-3bc1-43df-9cb4-c2f4ca2c95e8 = 1.000/1.000e+00, cff1b004-aec5-4a40-91f3-98dcf81435bc = 1.000/1.000e+00, e480e1bf-35cc-4af8-b20c-9c8f0976453d = 1.000/1.000e+00, 9dda64d6-b87a-4796-b58f-2a3917d802e3 = 1.000/1.000e+00, 5d1d5d94-9fe5-4f82-bea1-71808e435a13 = 1.000/1.000e+00, 1a6a7296-d3ed-404e-a005-0505ef91d7f2 = 1.000/1.000e+00, 2e52081a-6fa9-4cd5-965f-c29d9cdbf27b = 1.000/1.000e+00, fd28309c-2dfd-47a0-8001-374abd546372 = 1.000/1.000e+00, 7bbebe20-7f56-42d2-874a-af60ebc950f8 = 1.000/1.000e+00, c2ba0e52-7647-49aa-9902-bddfb1445315 = 1.000/1.000e+00, 2611d888-5321-44c6-abd6-b428e27c6591 = 1.000/1.000e+00, 9caa5fa8-0b30-4d34-9b97-33bda86af032 = 1.000/1.000e+00, c04b6196-f3f5-4e40-b1e1-de832e107eac = 1.000/1.000e+00, eefa8b0e-4948-48b2-bc04-3f0b9512bc5e = 1.000/1.000e+00, 50d9471d-21d1-492d-a93d-1e0d7b6470a1 = 1.000/1.000e+00, 7c94962e-556c-41e1-ad96-871410740b9c = 1.000/1.000e+00, b84af4db-52c6-45f4-adb6-8d1b41205a00 = 1.000/1.000e+00, 5be57691-7b67-482f-8ae5-eb46afe1eedc = 1.000/1.000e+00, 89750bf3-f32e-4d88-9ae0-3d770fdce7b7 = 1.000/1.000e+00, 5fcadd41-4323-4b61-9204-f3fa53bde66e = 1.000/1.000e+00, 5322f35b-1752-4a96-8e48-359016dddc0f = 1.000/1.000e+00]
Orientation rejected. Popping history element from 10.305150423174425, 12.927964645826933, 13.269538708935618, 13.317331797942012, 13.324986664991098
Rejected: LBFGS Orientation magnitude: 3.852e+03, gradient 5.350e-03, dot -1.000; [0de914e1-680e-4d96-ae93-40cbda60cbaa = 1.000/1.000e+00, da7faaed-33b7-4bb0-9fa2-b495dae177c8 = 1.000/1.000e+00, eefa8b0e-4948-48b2-bc04-3f0b9512bc5e = 1.000/1.000e+00, 5be57691-7b67-482f-8ae5-eb46afe1eedc = 1.000/1.000e+00, 2611d888-5321-44c6-abd6-b428e27c6591 = 1.000/1.000e+00, 5d1d5d94-9fe5-4f82-bea1-71808e435a13 = 1.000/1.000e+00, c04b6196-f3f5-4e40-b1e1-de832e107eac = 1.000/1.000e+00, 7c94962e-556c-41e1-ad96-871410740b9c = 1.000/1.000e+00, c2ba0e52-7647-49aa-9902-bddfb1445315 = 1.000/1.000e+00, 2e52081a-6fa9-4cd5-965f-c29d9cdbf27b = 1.000/1.000e+00, 5322f35b-1752-4a96-8e48-359016dddc0f = 1.000/1.000e+00, e480e1bf-35cc-4af8-b20c-9c8f0976453d = 1.000/1.000e+00, 89750bf3-f32e-4d88-9ae0-3d770fdce7b7 = 1.000/1.000e+00, 9dda64d6-b87a-4796-b58f-2a3917d802e3 = 1.000/1.000e+00, 04274889-4931-4b0a-a9b3-30f2734ea612 = 1.000/1.000e+00, 6328b8d9-3bc1-43df-9cb4-c2f4ca2c95e8 = 1.000/1.000e+00, 7bbebe20-7f56-42d2-874a-af60ebc950f8 = 1.000/1.000e+00, 5fcadd41-4323-4b61-9204-f3fa53bde66e = 1.000/1.000e+00, 50d9471d-21d1-492d-a93d-1e0d7b6470a1 = 1.000/1.000e+00, c4f78eaa-96b3-49fb-af43-94e4d45592f9 = 1.000/1.000e+00, b84af4db-52c6-45f4-adb6-8d1b41205a00 = 1.000/1.000e+00, cff1b004-aec5-4a40-91f3-98dcf81435bc = 1.000/1.000e+00, 9caa5fa8-0b30-4d34-9b97-33bda86af032 = 1.000/1.000e+00, fd28309c-2dfd-47a0-8001-374abd546372 = 1.000/1.000e+00, 1a6a7296-d3ed-404e-a005-0505ef91d7f2 = 1.000/1.000e+00]
Orientation rejected. Popping history element from 10.305150423174425, 12.927964645826933, 13.269538708935618, 13.317331797942012
LBFGS Accumulation History: 3 points
Removed measurement 2327ac3a to history. Total: 8
Removed measurement 1f6b24a6 to history. Total: 7
Removed measurement 47ef26cb to history. Total: 6
Removed measurement 12071a8e to history. Total: 5
Removed measurement 3bef3150 to history. Total: 4
Removed measurement 2cf84ce5 to history. Total: 3
Adding measurement c083a0e to history. Total: 3
th(0)=10.305150423174425;dx=-2.8625417842151176E-5
Adding measurement 78ecb5eb to history. Total: 4
New Minimum: 10.305150423174425 > 5.644183675057867
END: th(187148.86177126726)=5.644183675057867; dx=-2.118484233258794E-5 evalInputDelta=4.660966748116557
Fitness changed from 10.305150423174425 to 5.644183675057867
Iteration 2 complete. Error: 5.644183675057867 Total: 369.0677; Orientation: 363.4721; Line Search: 4.9529
Final threshold in iteration 2: 5.644183675057867 (> 0.0) after 397.210s (< 30.000s)

Returns

    5.644183675057867

This training run resulted in the following configuration:

TrainingTester.java:610 executed in 0.00 seconds (0.000 gc):

    RefList<double[]> state = network.state();
    assert state != null;
    String description = state.stream().map(RefArrays::toString).reduce((a, b) -> a + "\n" + b)
        .orElse("");
    state.freeRef();
    return description;

Returns

    

And regressed input:

TrainingTester.java:622 executed in 0.00 seconds (0.000 gc):

    return RefArrays.stream(RefUtil.addRef(data)).flatMap(x -> {
      return RefArrays.stream(x);
    }).limit(1).map(x -> {
      String temp_18_0015 = x.prettyPrint();
      x.freeRef();
      return temp_18_0015;
    }).reduce((a, b) -> a + "\n" + b).orElse("");

Returns

    [
    	[ [ -0.22279097457291935 ], [ 0.18988673031917774 ], [ -0.9530865601646104 ], [ -1.7407046004364135 ], [ 1.3639655566710813 ], [ 0.23027421797872147 ], [ -0.14183793553347324 ], [ -1.2573782656220003 ], ... ],
    	[ [ -0.8974278045201234 ], [ -0.7933294708362958 ], [ 0.25727716797249955 ], [ 0.7215557977139405 ], [ -1.0522718260692572 ], [ -0.5823843516376612 ], [ -1.623791532657233 ], [ -2.0129716426867423 ], ... ],
    	[ [ 1.107104047643279 ], [ 1.5554797353277103 ], [ 0.2659535971237599 ], [ 0.858428362604437 ], [ 1.3832823238476373 ], [ -0.7393331384865971 ], [ 1.1586348100939488 ], [ -0.7253810295876737 ], ... ],
    	[ [ -0.85074419964047 ], [ -1.3845559837420451 ], [ 0.7958878464878053 ], [ 1.7456893188391047 ], [ -1.8656163924747573 ], [ 0.7424738617960496 ], [ 1.411002949993778 ], [ 1.2755720094517722 ], ... ],
    	[ [ -0.07316979827923344 ], [ 0.9083550641838803 ], [ 1.1763867435471254 ], [ -1.0441169373311234 ], [ 1.5311570946194937 ], [ 2.1326035027869126 ], [ 1.3588893082567466 ], [ 1.4890569006546206 ], ... ],
    	[ [ -1.5916394078847729 ], [ -0.13210184176191914 ], [ -0.6415736572488182 ], [ 1.3461517527162512 ], [ 0.2451195152686922 ], [ -1.0340368352383829 ], [ -0.6853659340184697 ], [ -1.0260939219211078 ], ... ],
    	[ [ 1.220456161833381 ], [ 1.5220939219211078 ], [ 0.08515395859761091 ], [ 0.07794254126106587 ], [ 0.6515368220104352 ], [ 1.6068618810840116 ], [ 0.5685504558106981 ], [ -1.2147731150380416 ], ... ],
    	[ [ 1.3944386743547124 ], [ 1.9702171312959966 ], [ -1.2847342599464036 ], [ -0.5888869163472824 ], [ -1.286232226865201 ], [ -0.7332661121098056 ], [ 0.7756834188515487 ], [ -1.1209633507897216 ], ... ],
    	...
    ]

To produce the following output:

TrainingTester.java:633 executed in 0.07 seconds (0.000 gc):

    Result[] array = ConstantResult.batchResultArray(pop(RefUtil.addRef(data)));
    @Nullable
    Result eval = layer.eval(array);
    assert eval != null;
    TensorList tensorList = Result.getData(eval);
    String temp_18_0016 = tensorList.stream().limit(1).map(x -> {
      String temp_18_0017 = x.prettyPrint();
      x.freeRef();
      return temp_18_0017;
    }).reduce((a, b) -> a + "\n" + b).orElse("");
    tensorList.freeRef();
    return temp_18_0016;

Returns

    [
    	[ [ 3.2180451271354036 ], [ 1.193433651595889 ], [ -1.5854328008230518 ], [ -3.1075230021820675 ], [ 1.7358277833554068 ], [ 1.2793710898936077 ], [ 1.1588103223326338 ], [ -0.8868913281100017 ], ... ],
    	[ [ 1.6448609773993839 ], [ -0.9986473541814794 ], [ -0.4096141601375019 ], [ 3.0517789885697026 ], [ 0.5506408696537141 ], [ -2.3159217581883054 ], [ -0.5869576632861655 ], [ -3.248858213433711 ], ... ],
    	[ [ 2.9435202382163945 ], [ 5.297398676638551 ], [ -3.714232014381201 ], [ 0.024141813022184877 ], [ -0.4595883807618133 ], [ -2.208665692432985 ], [ 2.805174050469744 ], [ -3.2989051479383686 ], ... ],
    	[ [ -2.46572099820235 ], [ -0.7507799187102256 ], [ -1.3365607675609739 ], [ 1.4244465941955238 ], [ 1.6799180376262135 ], [ -0.7156306910197525 ], [ -1.9609852500311105 ], [ 0.941860047258861 ], ... ],
    	[ [ -1.917848991396167 ], [ 3.5097753209194016 ], [ -1.770066282264373 ], [ -3.136584686655617 ], [ -1.316214526902532 ], [ 0.7670175139345641 ], [ 1.6184465412837328 ], [ -2.906715496726898 ], ... ],
    	[ [ 0.3698029605761364 ], [ -0.792509208809596 ], [ 0.15613171375590906 ], [ -1.625241236418744 ], [ -1.850402423656539 ], [ -1.9381841761919145 ], [ -1.6188296700923486 ], [ 0.3535303903944609 ], ... ],
    	[ [ 0.9902808091669049 ], [ -2.1415303903944602 ], [ 1.2097697929880544 ], [ 1.2057127063053295 ], [ 0.22568411005217603 ], [ 0.09430940542005861 ], [ -0.9532477209465096 ], [ -0.1858655751902083 ], ... ],
    	[ [ 2.6161933717735626 ], [ 1.2150856564799832 ], [ 0.06832870026798221 ], [ -2.280434581736412 ], [ 0.08083886567399623 ], [ -2.2663305605490285 ], [ 1.0744170942577431 ], [ -3.0328167539486084 ], ... ],
    	...
    ]

TrainingTester.java:432 executed in 0.14 seconds (0.000 gc):

    return TestUtil.compare(title + " vs Iteration", runs);
Logging
Plotting range=[1.0, -29.3792861874995], [3.0, 1.0130543355114285]; valueStats=DoubleSummaryStatistics{count=6, sum=32.991382, min=0.000000, average=5.498564, max=10.305150}
Plotting 3 points for GD
Only 1 points for CjGD
Plotting 2 points for LBFGS

Returns

Result

TrainingTester.java:435 executed in 0.02 seconds (0.000 gc):

    return TestUtil.compareTime(title + " vs Time", runs);
Logging
Plotting range=[0.0, -29.3792861874995], [369.068, 1.0130543355114285]; valueStats=DoubleSummaryStatistics{count=6, sum=32.991382, min=0.000000, average=5.498564, max=10.305150}
Plotting 3 points for GD
Only 1 points for CjGD
Plotting 2 points for LBFGS

Returns

Result

Results

TrainingTester.java:255 executed in 0.00 seconds (0.000 gc):

    return grid(inputLearning, modelLearning, completeLearning);

Returns

Result

TrainingTester.java:258 executed in 0.00 seconds (0.000 gc):

    return new ComponentResult(null == inputLearning ? null : inputLearning.value,
        null == modelLearning ? null : modelLearning.value, null == completeLearning ? null : completeLearning.value);

Returns

    {"input":{ "LBFGS": { "type": "NonConverged", "value": 5.644183675057867 }, "CjGD": { "type": "Converged", "value": 4.1755511922277604E-30 }, "GD": { "type": "NonConverged", "value": 1.0927139594912025 } }, "model":null, "complete":null}

LayerTests.java:425 executed in 0.00 seconds (0.000 gc):

    throwException(exceptions.addRef());

Results

details: {"input":{ "LBFGS": { "type": "NonConverged", "value": 5.644183675057867 }, "CjGD": { "type": "Converged", "value": 4.1755511922277604E-30 }, "GD": { "type": "NonConverged", "value": 1.0927139594912025 } }, "model":null, "complete":null}
result: OK
  {
    "result": "OK",
    "performance": {
      "execution_time": "479.084",
      "gc_time": "21.715"
    },
    "created_on": 1586741367100,
    "file_name": "trainingTest",
    "report": {
      "simpleName": "Double_List",
      "canonicalName": "com.simiacryptus.mindseye.layers.cudnn.SumInputsLayerTest.Double_List",
      "link": "https://github.com/SimiaCryptus/mindseye-cudnn/tree/59d5b3318556370acb2d83ee6ec123ce0fc6974f/src/test/java/com/simiacryptus/mindseye/layers/cudnn/SumInputsLayerTest.java",
      "javaDoc": ""
    },
    "training_analysis": {
      "input": {
        "LBFGS": {
          "type": "NonConverged",
          "value": 5.644183675057867
        },
        "CjGD": {
          "type": "Converged",
          "value": 4.1755511922277604E-30
        },
        "GD": {
          "type": "NonConverged",
          "value": 1.0927139594912025
        }
      }
    },
    "archive": "s3://code.simiacrypt.us/tests/com/simiacryptus/mindseye/layers/cudnn/SumInputsLayer/Double_List/trainingTest/202004132927",
    "id": "249186a3-7ae9-4100-a5e1-0465020a8d56",
    "report_type": "Components",
    "display_name": "Comparative Training",
    "target": {
      "simpleName": "SumInputsLayer",
      "canonicalName": "com.simiacryptus.mindseye.layers.cudnn.SumInputsLayer",
      "link": "https://github.com/SimiaCryptus/mindseye-cudnn/tree/59d5b3318556370acb2d83ee6ec123ce0fc6974f/src/main/java/com/simiacryptus/mindseye/layers/cudnn/SumInputsLayer.java",
      "javaDoc": ""
    }
  }