| Package | Description |
|---|---|
| de.jungblut.classification.nn | |
| Modifier and Type | Method and Description |
|---|---|
| static de.jungblut.math.DoubleMatrix[] | MultilayerPerceptronCostFunction.backwardPropagate(de.jungblut.math.DoubleMatrix y, de.jungblut.math.DoubleMatrix[] thetas, de.jungblut.math.DoubleMatrix[] ax, de.jungblut.math.DoubleMatrix[] zx, MultilayerPerceptronCostFunction.NetworkConfiguration conf) |
| static void | MultilayerPerceptronCostFunction.calculateGradients(de.jungblut.math.DoubleMatrix[] thetas, de.jungblut.math.DoubleMatrix[] thetaGradients, de.jungblut.math.DoubleMatrix[] ax, de.jungblut.math.DoubleMatrix[] deltaX, int m, MultilayerPerceptronCostFunction.NetworkConfiguration conf) |
| static double | MultilayerPerceptronCostFunction.calculateRegularization(de.jungblut.math.DoubleMatrix[] thetas, int m, MultilayerPerceptronCostFunction.NetworkConfiguration conf) |
| static CostGradientTuple | MultilayerPerceptronCostFunction.computeNextStep(de.jungblut.math.DoubleVector input, de.jungblut.math.DoubleMatrix x, de.jungblut.math.DoubleMatrix y, MultilayerPerceptronCostFunction.NetworkConfiguration conf) — Do a full forward pass and backpropagate the error. |
| static void | MultilayerPerceptronCostFunction.dropoutVisibleLayer(de.jungblut.math.DoubleMatrix x, de.jungblut.math.DoubleMatrix[] ax, MultilayerPerceptronCostFunction.NetworkConfiguration conf) |
| static void | MultilayerPerceptronCostFunction.forwardPropagate(de.jungblut.math.DoubleMatrix[] thetas, de.jungblut.math.DoubleMatrix[] ax, de.jungblut.math.DoubleMatrix[] zx, MultilayerPerceptronCostFunction.NetworkConfiguration conf) |
Copyright © 2016. All rights reserved.