public class BatchNormalization extends org.deeplearning4j.nn.conf.layers.BatchNormalization implements OptionHandler, java.io.Serializable
| Constructor and Description |
|---|
BatchNormalization()
Constructor for setting some defaults.
|
| Modifier and Type | Method and Description |
|---|---|
org.nd4j.linalg.activations.IActivation |
getActivationFn() |
org.nd4j.linalg.activations.IActivation |
getActivationFunction() |
double |
getAdamMeanDecay() |
double |
getAdamVarDecay() |
double |
getBeta() |
double |
getBiasInit() |
double |
getBiasL1() |
double |
getBiasL2() |
double |
getBiasLearningRate() |
double |
getDecay() |
org.deeplearning4j.nn.conf.distribution.Distribution |
getDist() |
double |
getDropOut() |
double |
getEps() |
double |
getEpsilon() |
double |
getGamma() |
org.deeplearning4j.nn.conf.GradientNormalization |
getGradientNormalization() |
double |
getGradientNormalizationThreshold() |
double |
getL1() |
double |
getL1Bias() |
double |
getL2() |
double |
getL2Bias() |
java.lang.String |
getLayerName() |
double |
getLearningRate() |
java.util.Map<java.lang.Integer,java.lang.Double> |
getLearningRateSchedule() |
boolean |
getLockGammaAndBeta() |
double |
getMomentum() |
java.util.Map<java.lang.Integer,java.lang.Double> |
getMomentumSchedule() |
int |
getNIn() |
boolean |
getNoMinibatch() |
int |
getNOut() |
java.lang.String[] |
getOptions()
Gets the current settings of the Classifier.
|
double |
getRho() |
double |
getRmsDecay() |
org.deeplearning4j.nn.conf.Updater |
getUpdater() |
org.deeplearning4j.nn.weights.WeightInit |
getWeightInit() |
java.lang.String |
globalInfo()
Global info.
|
boolean |
isLockGammaBeta() |
boolean |
isMinibatch() |
java.util.Enumeration<Option> |
listOptions()
Returns an enumeration describing the available options.
|
void |
setActivationFn(org.nd4j.linalg.activations.IActivation fn) |
void |
setActivationFunction(org.nd4j.linalg.activations.IActivation activationFn) |
void |
setAdamMeanDecay(double adamMeanDecay) |
void |
setAdamVarDecay(double adamVarDecay) |
void |
setBeta(double beta) |
void |
setBiasInit(double biasInit) |
void |
setBiasL1(double biasL1) |
void |
setBiasL2(double biasL2) |
void |
setBiasLearningRate(double biasLearningRate) |
void |
setDecay(double decay) |
void |
setDist(org.deeplearning4j.nn.conf.distribution.Distribution dist) |
void |
setDropOut(double dropOut) |
void |
setEps(double eps) |
void |
setEpsilon(double epsilon) |
void |
setGamma(double gamma) |
void |
setGradientNormalization(org.deeplearning4j.nn.conf.GradientNormalization gradientNormalization) |
void |
setGradientNormalizationThreshold(double gradientNormalizationThreshold) |
void |
setL1(double l1) |
void |
setL1Bias(int l1bias) |
void |
setL2(double l2) |
void |
setL2Bias(int l2bias) |
void |
setLayerName(java.lang.String layerName) |
void |
setLearningRate(double learningRate) |
void |
setLearningRateSchedule(java.util.Map<java.lang.Integer,java.lang.Double> learningRateSchedule) |
void |
setLockGammaAndBeta(boolean lgb) |
void |
setLockGammaBeta(boolean lgb) |
void |
setMinibatch(boolean b) |
void |
setMomentum(double momentum) |
void |
setMomentumSchedule(java.util.Map<java.lang.Integer,java.lang.Double> momentumSchedule) |
void |
setNIn(int nIn) |
void |
setNoMinibatch(boolean b) |
void |
setNOut(int nOut) |
void |
setOptions(java.lang.String[] options)
Parses a given list of options.
|
void |
setRho(double rho) |
void |
setRmsDecay(double rmsDecay) |
void |
setUpdater(org.deeplearning4j.nn.conf.Updater updater) |
void |
setWeightInit(org.deeplearning4j.nn.weights.WeightInit weightInit) |
clone, equals, getL1ByParam, getL2ByParam, getLearningRateByParam, getOutputType, getPreProcessorForInputType, getUpdaterByParam, hashCode, initializer, instantiate, setNIn, toString

public BatchNormalization()
public java.lang.String globalInfo()
@OptionMetadata(displayName="layer name", description="The name of the layer (default = Batch normalization Layer).", commandLineParamName="name", commandLineParamSynopsis="-name <string>", displayOrder=0) public java.lang.String getLayerName()
getLayerName in class org.deeplearning4j.nn.conf.layers.Layerpublic void setLayerName(java.lang.String layerName)
setLayerName in class org.deeplearning4j.nn.conf.layers.Layer@OptionMetadata(displayName="decay parameter", description="The decay parameter (default = 0.9).", commandLineParamName="decay", commandLineParamSynopsis="-decay <double>", displayOrder=1) public double getDecay()
getDecay in class org.deeplearning4j.nn.conf.layers.BatchNormalizationpublic void setDecay(double decay)
setDecay in class org.deeplearning4j.nn.conf.layers.BatchNormalization@OptionMetadata(displayName="eps parameter", description="The eps parameter (default = 1e-5).", commandLineParamName="eps", commandLineParamSynopsis="-eps <double>", displayOrder=2) public double getEps()
getEps in class org.deeplearning4j.nn.conf.layers.BatchNormalizationpublic void setEps(double eps)
setEps in class org.deeplearning4j.nn.conf.layers.BatchNormalization@OptionMetadata(displayName="gamma parameter", description="The gamma parameter (default = 1).", commandLineParamName="gamma", commandLineParamSynopsis="-gamma <double>", displayOrder=3) public double getGamma()
getGamma in class org.deeplearning4j.nn.conf.layers.BatchNormalizationpublic void setGamma(double gamma)
setGamma in class org.deeplearning4j.nn.conf.layers.BatchNormalization@OptionMetadata(displayName="beta parameter", description="The beta parameter (default = 0).", commandLineParamName="beta", commandLineParamSynopsis="-beta <double>", displayOrder=4) public double getBeta()
getBeta in class org.deeplearning4j.nn.conf.layers.BatchNormalizationpublic void setBeta(double beta)
setBeta in class org.deeplearning4j.nn.conf.layers.BatchNormalization@OptionMetadata(displayName="lock gamma and beta", description="Whether to lock gamma and beta.", commandLineParamName="lockGammaBeta", commandLineParamSynopsis="-lockGammaBeta", displayOrder=5) public boolean getLockGammaAndBeta()
public void setLockGammaAndBeta(boolean lgb)
@ProgrammaticProperty public boolean isLockGammaBeta()
isLockGammaBeta in class org.deeplearning4j.nn.conf.layers.BatchNormalizationpublic void setLockGammaBeta(boolean lgb)
setLockGammaBeta in class org.deeplearning4j.nn.conf.layers.BatchNormalization@OptionMetadata(displayName="noMinibatch", description="Whether minibatches are not used.", commandLineParamName="noMinibatch", commandLineParamSynopsis="-noMinibatch", displayOrder=6) public boolean getNoMinibatch()
public void setNoMinibatch(boolean b)
@ProgrammaticProperty public boolean isMinibatch()
isMinibatch in class org.deeplearning4j.nn.conf.layers.BatchNormalizationpublic void setMinibatch(boolean b)
setMinibatch in class org.deeplearning4j.nn.conf.layers.BatchNormalization@OptionMetadata(displayName="activation function", description="The activation function to use (default = Identity).", commandLineParamName="activation", commandLineParamSynopsis="-activation <specification>", displayOrder=7) public org.nd4j.linalg.activations.IActivation getActivationFunction()
public void setActivationFunction(org.nd4j.linalg.activations.IActivation activationFn)
@ProgrammaticProperty public org.nd4j.linalg.activations.IActivation getActivationFn()
getActivationFn in class org.deeplearning4j.nn.conf.layers.Layerpublic void setActivationFn(org.nd4j.linalg.activations.IActivation fn)
setActivationFn in class org.deeplearning4j.nn.conf.layers.Layer@OptionMetadata(displayName="weight initialization method", description="The method for weight initialization (default = XAVIER).", commandLineParamName="weightInit", commandLineParamSynopsis="-weightInit <specification>", displayOrder=8) public org.deeplearning4j.nn.weights.WeightInit getWeightInit()
getWeightInit in class org.deeplearning4j.nn.conf.layers.Layerpublic void setWeightInit(org.deeplearning4j.nn.weights.WeightInit weightInit)
setWeightInit in class org.deeplearning4j.nn.conf.layers.Layer@OptionMetadata(displayName="bias initialization", description="The bias initialization (default = 1.0).", commandLineParamName="biasInit", commandLineParamSynopsis="-biasInit <double>", displayOrder=9) public double getBiasInit()
getBiasInit in class org.deeplearning4j.nn.conf.layers.Layerpublic void setBiasInit(double biasInit)
setBiasInit in class org.deeplearning4j.nn.conf.layers.Layer@OptionMetadata(displayName="distribution", description="The distribution (default = NormalDistribution(1e-3, 1)).", commandLineParamName="dist", commandLineParamSynopsis="-dist <specification>", displayOrder=10) public org.deeplearning4j.nn.conf.distribution.Distribution getDist()
getDist in class org.deeplearning4j.nn.conf.layers.Layerpublic void setDist(org.deeplearning4j.nn.conf.distribution.Distribution dist)
setDist in class org.deeplearning4j.nn.conf.layers.Layer@OptionMetadata(displayName="learning rate", description="The learning rate (default = 0.01).", commandLineParamName="lr", commandLineParamSynopsis="-lr <double>", displayOrder=11) public double getLearningRate()
getLearningRate in class org.deeplearning4j.nn.conf.layers.Layerpublic void setLearningRate(double learningRate)
setLearningRate in class org.deeplearning4j.nn.conf.layers.Layer@OptionMetadata(displayName="bias learning rate", description="The bias learning rate (default = 0.01).", commandLineParamName="blr", commandLineParamSynopsis="-blr <double>", displayOrder=12) public double getBiasLearningRate()
getBiasLearningRate in class org.deeplearning4j.nn.conf.layers.Layerpublic void setBiasLearningRate(double biasLearningRate)
setBiasLearningRate in class org.deeplearning4j.nn.conf.layers.Layer@OptionMetadata(displayName="learning rate schedule", description="The learning rate schedule.", commandLineParamName="lrSchedule", commandLineParamSynopsis="-lrSchedule <specification>", displayOrder=13) public java.util.Map<java.lang.Integer,java.lang.Double> getLearningRateSchedule()
getLearningRateSchedule in class org.deeplearning4j.nn.conf.layers.Layerpublic void setLearningRateSchedule(java.util.Map<java.lang.Integer,java.lang.Double> learningRateSchedule)
setLearningRateSchedule in class org.deeplearning4j.nn.conf.layers.Layer@OptionMetadata(displayName="momentum", description="The momentum (default = 0.9).", commandLineParamName="momentum", commandLineParamSynopsis="-momentum <double>", displayOrder=14) public double getMomentum()
getMomentum in class org.deeplearning4j.nn.conf.layers.Layerpublic void setMomentum(double momentum)
setMomentum in class org.deeplearning4j.nn.conf.layers.Layer@OptionMetadata(displayName="momentum schedule", description="The momentum schedule.", commandLineParamName="momentumSchedule", commandLineParamSynopsis="-momentumSchedule <specification>", displayOrder=15) public java.util.Map<java.lang.Integer,java.lang.Double> getMomentumSchedule()
getMomentumSchedule in class org.deeplearning4j.nn.conf.layers.Layerpublic void setMomentumSchedule(java.util.Map<java.lang.Integer,java.lang.Double> momentumSchedule)
setMomentumSchedule in class org.deeplearning4j.nn.conf.layers.Layer@OptionMetadata(displayName="L1", description="The L1 parameter (default = 0).", commandLineParamName="L1", commandLineParamSynopsis="-L1 <double>", displayOrder=16) public double getL1()
getL1 in class org.deeplearning4j.nn.conf.layers.Layerpublic void setL1(double l1)
setL1 in class org.deeplearning4j.nn.conf.layers.Layer@OptionMetadata(displayName="L2", description="The L2 parameter (default = 0).", commandLineParamName="L2", commandLineParamSynopsis="-L2 <double>", displayOrder=17) public double getL2()
getL2 in class org.deeplearning4j.nn.conf.layers.Layerpublic void setL2(double l2)
setL2 in class org.deeplearning4j.nn.conf.layers.Layer@OptionMetadata(displayName="L1 bias", description="The L1 bias parameter (default = 0).", commandLineParamName="l1Bias", commandLineParamSynopsis="-l1Bias <double>", displayOrder=18) public double getBiasL1()
public void setBiasL1(double biasL1)
@OptionMetadata(displayName="L2 bias", description="The L2 bias parameter (default = 0).", commandLineParamName="l2Bias", commandLineParamSynopsis="-l2Bias <double>", displayOrder=19) public double getBiasL2()
public void setBiasL2(double biasL2)
@OptionMetadata(displayName="dropout parameter", description="The dropout parameter (default = 0).", commandLineParamName="dropout", commandLineParamSynopsis="-dropout <double>", displayOrder=20) public double getDropOut()
getDropOut in class org.deeplearning4j.nn.conf.layers.Layerpublic void setDropOut(double dropOut)
setDropOut in class org.deeplearning4j.nn.conf.layers.Layer@OptionMetadata(displayName="updater for stochastic gradient descent", description="The updater for stochastic gradient descent (default NESTEROVS).", commandLineParamName="updater", commandLineParamSynopsis="-updater <specification>", displayOrder=21) public org.deeplearning4j.nn.conf.Updater getUpdater()
getUpdater in class org.deeplearning4j.nn.conf.layers.Layerpublic void setUpdater(org.deeplearning4j.nn.conf.Updater updater)
setUpdater in class org.deeplearning4j.nn.conf.layers.Layer@OptionMetadata(displayName="ADADELTA's rho parameter", description="ADADELTA's rho parameter (default = 0).", commandLineParamName="rho", commandLineParamSynopsis="-rho <double>", displayOrder=22) public double getRho()
getRho in class org.deeplearning4j.nn.conf.layers.Layerpublic void setRho(double rho)
setRho in class org.deeplearning4j.nn.conf.layers.Layer@OptionMetadata(displayName="ADADELTA's epsilon parameter", description="ADADELTA's epsilon parameter (default = 1e-6).", commandLineParamName="epsilon", commandLineParamSynopsis="-epsilon <double>", displayOrder=23) public double getEpsilon()
getEpsilon in class org.deeplearning4j.nn.conf.layers.Layerpublic void setEpsilon(double epsilon)
setEpsilon in class org.deeplearning4j.nn.conf.layers.Layer@OptionMetadata(displayName="RMSPROP's RMS decay parameter", description="RMSPROP's RMS decay parameter (default = 0.95).", commandLineParamName="rmsDecay", commandLineParamSynopsis="-rmsDecay <double>", displayOrder=24) public double getRmsDecay()
getRmsDecay in class org.deeplearning4j.nn.conf.layers.Layerpublic void setRmsDecay(double rmsDecay)
setRmsDecay in class org.deeplearning4j.nn.conf.layers.Layer@OptionMetadata(displayName="ADAM's mean decay parameter", description="ADAM's mean decay parameter (default 0.9).", commandLineParamName="adamMeanDecay", commandLineParamSynopsis="-adamMeanDecay <double>", displayOrder=25) public double getAdamMeanDecay()
getAdamMeanDecay in class org.deeplearning4j.nn.conf.layers.Layerpublic void setAdamMeanDecay(double adamMeanDecay)
setAdamMeanDecay in class org.deeplearning4j.nn.conf.layers.Layer@OptionMetadata(displayName="ADAM's var decay parameter", description="ADAM's var decay parameter (default 0.999).", commandLineParamName="adamVarDecay", commandLineParamSynopsis="-adamVarDecay <double>", displayOrder=26) public double getAdamVarDecay()
getAdamVarDecay in class org.deeplearning4j.nn.conf.layers.Layerpublic void setAdamVarDecay(double adamVarDecay)
setAdamVarDecay in class org.deeplearning4j.nn.conf.layers.Layer@OptionMetadata(displayName="gradient normalization method", description="The gradient normalization method (default = None).", commandLineParamName="gradientNormalization", commandLineParamSynopsis="-gradientNormalization <specification>", displayOrder=27) public org.deeplearning4j.nn.conf.GradientNormalization getGradientNormalization()
getGradientNormalization in class org.deeplearning4j.nn.conf.layers.Layerpublic void setGradientNormalization(org.deeplearning4j.nn.conf.GradientNormalization gradientNormalization)
setGradientNormalization in class org.deeplearning4j.nn.conf.layers.Layer@OptionMetadata(displayName="gradient normalization threshold", description="The gradient normalization threshold (default = 1).", commandLineParamName="gradNormThreshold", commandLineParamSynopsis="-gradNormThreshold <double>", displayOrder=28) public double getGradientNormalizationThreshold()
getGradientNormalizationThreshold in class org.deeplearning4j.nn.conf.layers.Layerpublic void setGradientNormalizationThreshold(double gradientNormalizationThreshold)
setGradientNormalizationThreshold in class org.deeplearning4j.nn.conf.layers.Layer@ProgrammaticProperty public int getNIn()
getNIn in class org.deeplearning4j.nn.conf.layers.FeedForwardLayerpublic void setNIn(int nIn)
setNIn in class org.deeplearning4j.nn.conf.layers.FeedForwardLayer@ProgrammaticProperty public int getNOut()
getNOut in class org.deeplearning4j.nn.conf.layers.FeedForwardLayerpublic void setNOut(int nOut)
setNOut in class org.deeplearning4j.nn.conf.layers.FeedForwardLayer@ProgrammaticProperty public double getL1Bias()
getL1Bias in class org.deeplearning4j.nn.conf.layers.Layerpublic void setL1Bias(int l1bias)
@ProgrammaticProperty public double getL2Bias()
getL2Bias in class org.deeplearning4j.nn.conf.layers.Layerpublic void setL2Bias(int l2bias)
public java.util.Enumeration<Option> listOptions()
listOptions in interface OptionHandlerpublic java.lang.String[] getOptions()
getOptions in interface OptionHandlerpublic void setOptions(java.lang.String[] options)
throws java.lang.Exception
setOptions in interface OptionHandler
Parameters: options - the list of options as an array of strings
Throws: java.lang.Exception - if an option is not supported