public class BatchNormalization extends org.deeplearning4j.nn.conf.layers.BatchNormalization implements OptionHandler, java.io.Serializable
Constructor and Description |
---|
BatchNormalization()
Constructor for setting some defaults.
|
Modifier and Type | Method and Description |
---|---|
java.lang.String |
getActivationFunction() |
double |
getAdamMeanDecay() |
double |
getAdamVarDecay() |
double |
getBeta() |
double |
getBiasInit() |
double |
getBiasL1() |
double |
getBiasL2() |
double |
getBiasLearningRate() |
double |
getDecay() |
org.deeplearning4j.nn.conf.distribution.Distribution |
getDist() |
double |
getDropOut() |
double |
getEps() |
double |
getEpsilon() |
double |
getGamma() |
org.deeplearning4j.nn.conf.GradientNormalization |
getGradientNormalization() |
double |
getGradientNormalizationThreshold() |
double |
getL1() |
double |
getL2() |
java.lang.String |
getLayerName() |
double |
getLearningRate() |
java.util.Map<java.lang.Integer,java.lang.Double> |
getLearningRateSchedule() |
double |
getMomentum() |
java.util.Map<java.lang.Integer,java.lang.Double> |
getMomentumSchedule() |
int |
getNIn() |
int |
getNOut() |
java.lang.String[] |
getOptions()
Gets the current settings of the Classifier.
|
double |
getRho() |
double |
getRmsDecay() |
org.deeplearning4j.nn.conf.Updater |
getUpdater() |
org.deeplearning4j.nn.weights.WeightInit |
getWeightInit() |
java.lang.String |
globalInfo()
Global info.
|
java.util.Enumeration<Option> |
listOptions()
Returns an enumeration describing the available options.
|
void |
setActivationFunction(java.lang.String activationFunction) |
void |
setAdamMeanDecay(double adamMeanDecay) |
void |
setAdamVarDecay(double adamVarDecay) |
void |
setBeta(double beta) |
void |
setBiasInit(double biasInit) |
void |
setBiasL1(double biasL1) |
void |
setBiasL2(double biasL2) |
void |
setBiasLearningRate(double biasLearningRate) |
void |
setDecay(double decay) |
void |
setDist(org.deeplearning4j.nn.conf.distribution.Distribution dist) |
void |
setDropOut(double dropOut) |
void |
setEps(double eps) |
void |
setEpsilon(double epsilon) |
void |
setGamma(double gamma) |
void |
setGradientNormalization(org.deeplearning4j.nn.conf.GradientNormalization gradientNormalization) |
void |
setGradientNormalizationThreshold(double gradientNormalizationThreshold) |
void |
setL1(double l1) |
void |
setL2(double l2) |
void |
setLayerName(java.lang.String layerName) |
void |
setLearningRate(double learningRate) |
void |
setLearningRateSchedule(java.util.Map<java.lang.Integer,java.lang.Double> learningRateSchedule) |
void |
setMomentum(double momentum) |
void |
setMomentumSchedule(java.util.Map<java.lang.Integer,java.lang.Double> momentumSchedule) |
void |
setNIn(int nIn) |
void |
setNOut(int nOut) |
void |
setOptions(java.lang.String[] options)
Parses a given list of options.
|
void |
setRho(double rho) |
void |
setRmsDecay(double rmsDecay) |
void |
setUpdater(org.deeplearning4j.nn.conf.Updater updater) |
void |
setWeightInit(org.deeplearning4j.nn.weights.WeightInit weightInit) |
clone, equals, getL1ByParam, getL2ByParam, getLearningRateByParam, getOutputType, getPreProcessorForInputType, getUpdaterByParam, hashCode, initializer, instantiate, isLockGammaBeta, isMinibatch, setLockGammaBeta, setMinibatch, setNIn, toString
public BatchNormalization()
public java.lang.String globalInfo()
@OptionMetadata(displayName="layer name", description="The name of the layer (default = Batch normalization Layer).", commandLineParamName="name", commandLineParamSynopsis="-name <string>", displayOrder=1) public java.lang.String getLayerName()
getLayerName
in class org.deeplearning4j.nn.conf.layers.Layer
public void setLayerName(java.lang.String layerName)
setLayerName
in class org.deeplearning4j.nn.conf.layers.Layer
@OptionMetadata(displayName="name of activation function", description="The name of the activation function (default = identity; options are softmax,logsoftmax,maxout,identity,abs,cos,elu,exp,log,pow,sin,acos,asin,atan,ceil,relu,sign,sqrt,step,tanh,floor,round,hardtanh,timesoneminus,negative,softplus,softsign,leakyrelu,stabilize,sigmoid).", commandLineParamName="activation", commandLineParamSynopsis="-activation <string>", displayOrder=2) public java.lang.String getActivationFunction()
getActivationFunction
in class org.deeplearning4j.nn.conf.layers.Layer
public void setActivationFunction(java.lang.String activationFunction)
setActivationFunction
in class org.deeplearning4j.nn.conf.layers.Layer
@OptionMetadata(displayName="weight initialization method", description="The method for weight initialization (default = XAVIER).", commandLineParamName="weightInit", commandLineParamSynopsis="-weightInit <specification>", displayOrder=3) public org.deeplearning4j.nn.weights.WeightInit getWeightInit()
getWeightInit
in class org.deeplearning4j.nn.conf.layers.Layer
public void setWeightInit(org.deeplearning4j.nn.weights.WeightInit weightInit)
setWeightInit
in class org.deeplearning4j.nn.conf.layers.Layer
@OptionMetadata(displayName="bias initialization", description="The bias initialization (default = 1.0).", commandLineParamName="biasInit", commandLineParamSynopsis="-biasInit <double>", displayOrder=4) public double getBiasInit()
getBiasInit
in class org.deeplearning4j.nn.conf.layers.Layer
public void setBiasInit(double biasInit)
setBiasInit
in class org.deeplearning4j.nn.conf.layers.Layer
@OptionMetadata(displayName="distribution", description="The distribution (default = NormalDistribution(1e-3, 1)).", commandLineParamName="dist", commandLineParamSynopsis="-dist <specification>", displayOrder=5) public org.deeplearning4j.nn.conf.distribution.Distribution getDist()
getDist
in class org.deeplearning4j.nn.conf.layers.Layer
public void setDist(org.deeplearning4j.nn.conf.distribution.Distribution dist)
setDist
in class org.deeplearning4j.nn.conf.layers.Layer
@OptionMetadata(displayName="learning rate", description="The learning rate (default = 0.01).", commandLineParamName="lr", commandLineParamSynopsis="-lr <double>", displayOrder=6) public double getLearningRate()
getLearningRate
in class org.deeplearning4j.nn.conf.layers.Layer
public void setLearningRate(double learningRate)
setLearningRate
in class org.deeplearning4j.nn.conf.layers.Layer
@OptionMetadata(displayName="bias learning rate", description="The bias learning rate (default = 0.01).", commandLineParamName="blr", commandLineParamSynopsis="-blr <double>", displayOrder=7) public double getBiasLearningRate()
getBiasLearningRate
in class org.deeplearning4j.nn.conf.layers.Layer
public void setBiasLearningRate(double biasLearningRate)
setBiasLearningRate
in class org.deeplearning4j.nn.conf.layers.Layer
@OptionMetadata(displayName="learning rate schedule", description="The learning rate schedule.", commandLineParamName="lrSchedule", commandLineParamSynopsis="-lrSchedule <specification>", displayOrder=8) public java.util.Map<java.lang.Integer,java.lang.Double> getLearningRateSchedule()
getLearningRateSchedule
in class org.deeplearning4j.nn.conf.layers.Layer
public void setLearningRateSchedule(java.util.Map<java.lang.Integer,java.lang.Double> learningRateSchedule)
setLearningRateSchedule
in class org.deeplearning4j.nn.conf.layers.Layer
@OptionMetadata(displayName="momentum", description="The momentum (default = 0.9).", commandLineParamName="momentum", commandLineParamSynopsis="-momentum <double>", displayOrder=9) public double getMomentum()
getMomentum
in class org.deeplearning4j.nn.conf.layers.Layer
public void setMomentum(double momentum)
setMomentum
in class org.deeplearning4j.nn.conf.layers.Layer
@OptionMetadata(displayName="momentum schedule", description="The momentum schedule.", commandLineParamName="momentumSchedule", commandLineParamSynopsis="-momentumSchedule <specification>", displayOrder=10) public java.util.Map<java.lang.Integer,java.lang.Double> getMomentumSchedule()
getMomentumSchedule
in class org.deeplearning4j.nn.conf.layers.Layer
public void setMomentumSchedule(java.util.Map<java.lang.Integer,java.lang.Double> momentumSchedule)
setMomentumSchedule
in class org.deeplearning4j.nn.conf.layers.Layer
@OptionMetadata(displayName="L1", description="The L1 parameter (default = 0).", commandLineParamName="L1", commandLineParamSynopsis="-L1 <double>", displayOrder=11) public double getL1()
getL1
in class org.deeplearning4j.nn.conf.layers.Layer
public void setL1(double l1)
setL1
in class org.deeplearning4j.nn.conf.layers.Layer
@OptionMetadata(displayName="L2", description="The L2 parameter (default = 0).", commandLineParamName="L2", commandLineParamSynopsis="-L2 <double>", displayOrder=12) public double getL2()
getL2
in class org.deeplearning4j.nn.conf.layers.Layer
public void setL2(double l2)
setL2
in class org.deeplearning4j.nn.conf.layers.Layer
@OptionMetadata(displayName="bias L1", description="The bias L1 parameter (default = 0).", commandLineParamName="biasL1", commandLineParamSynopsis="-biasL1 <double>", displayOrder=13) public double getBiasL1()
getBiasL1
in class org.deeplearning4j.nn.conf.layers.Layer
public void setBiasL1(double biasL1)
setBiasL1
in class org.deeplearning4j.nn.conf.layers.Layer
@OptionMetadata(displayName="bias L2", description="The bias L2 parameter (default = 0).", commandLineParamName="biasL2", commandLineParamSynopsis="-biasL2 <double>", displayOrder=14) public double getBiasL2()
getBiasL2
in class org.deeplearning4j.nn.conf.layers.Layer
public void setBiasL2(double biasL2)
setBiasL2
in class org.deeplearning4j.nn.conf.layers.Layer
@OptionMetadata(displayName="dropout parameter", description="The dropout parameter (default = 0).", commandLineParamName="dropout", commandLineParamSynopsis="-dropout <double>", displayOrder=15) public double getDropOut()
getDropOut
in class org.deeplearning4j.nn.conf.layers.Layer
public void setDropOut(double dropOut)
setDropOut
in class org.deeplearning4j.nn.conf.layers.Layer
@OptionMetadata(displayName="updater for stochastic gradient descent", description="The updater for stochastic gradient descent (default NESTEROVS).", commandLineParamName="updater", commandLineParamSynopsis="-updater <specification>", displayOrder=16) public org.deeplearning4j.nn.conf.Updater getUpdater()
getUpdater
in class org.deeplearning4j.nn.conf.layers.Layer
public void setUpdater(org.deeplearning4j.nn.conf.Updater updater)
setUpdater
in class org.deeplearning4j.nn.conf.layers.Layer
@OptionMetadata(displayName="ADADELTA's rho parameter", description="ADADELTA's rho parameter (default = 0).", commandLineParamName="rho", commandLineParamSynopsis="-rho <double>", displayOrder=17) public double getRho()
getRho
in class org.deeplearning4j.nn.conf.layers.Layer
public void setRho(double rho)
setRho
in class org.deeplearning4j.nn.conf.layers.Layer
@OptionMetadata(displayName="ADADELTA's epsilon parameter", description="ADADELTA's epsilon parameter (default = 1e-6).", commandLineParamName="epsilon", commandLineParamSynopsis="-epsilon <double>", displayOrder=18) public double getEpsilon()
getEpsilon
in class org.deeplearning4j.nn.conf.layers.Layer
public void setEpsilon(double epsilon)
setEpsilon
in class org.deeplearning4j.nn.conf.layers.Layer
@OptionMetadata(displayName="RMSPROP's RMS decay parameter", description="RMSPROP's RMS decay parameter (default = 0.95).", commandLineParamName="rmsDecay", commandLineParamSynopsis="-rmsDecay <double>", displayOrder=19) public double getRmsDecay()
getRmsDecay
in class org.deeplearning4j.nn.conf.layers.Layer
public void setRmsDecay(double rmsDecay)
setRmsDecay
in class org.deeplearning4j.nn.conf.layers.Layer
@OptionMetadata(displayName="ADAM's mean decay parameter", description="ADAM's mean decay parameter (default 0.9).", commandLineParamName="adamMeanDecay", commandLineParamSynopsis="-adamMeanDecay <double>", displayOrder=20) public double getAdamMeanDecay()
getAdamMeanDecay
in class org.deeplearning4j.nn.conf.layers.Layer
public void setAdamMeanDecay(double adamMeanDecay)
setAdamMeanDecay
in class org.deeplearning4j.nn.conf.layers.Layer
@OptionMetadata(displayName="ADAM's var decay parameter", description="ADAM's var decay parameter (default 0.999).", commandLineParamName="adamVarDecay", commandLineParamSynopsis="-adamVarDecay <double>", displayOrder=21) public double getAdamVarDecay()
getAdamVarDecay
in class org.deeplearning4j.nn.conf.layers.Layer
public void setAdamVarDecay(double adamVarDecay)
setAdamVarDecay
in class org.deeplearning4j.nn.conf.layers.Layer
@OptionMetadata(displayName="gradient normalization method", description="The gradient normalization method (default = None).", commandLineParamName="gradientNormalization", commandLineParamSynopsis="-gradientNormalization <specification>", displayOrder=22) public org.deeplearning4j.nn.conf.GradientNormalization getGradientNormalization()
getGradientNormalization
in class org.deeplearning4j.nn.conf.layers.Layer
public void setGradientNormalization(org.deeplearning4j.nn.conf.GradientNormalization gradientNormalization)
setGradientNormalization
in class org.deeplearning4j.nn.conf.layers.Layer
@OptionMetadata(displayName="gradient normalization threshold", description="The gradient normalization threshold (default = 1).", commandLineParamName="gradNormThreshold", commandLineParamSynopsis="-gradNormThreshold <double>", displayOrder=23) public double getGradientNormalizationThreshold()
getGradientNormalizationThreshold
in class org.deeplearning4j.nn.conf.layers.Layer
public void setGradientNormalizationThreshold(double gradientNormalizationThreshold)
setGradientNormalizationThreshold
in class org.deeplearning4j.nn.conf.layers.Layer
@ProgrammaticProperty public int getNIn()
getNIn
in class org.deeplearning4j.nn.conf.layers.FeedForwardLayer
public void setNIn(int nIn)
setNIn
in class org.deeplearning4j.nn.conf.layers.FeedForwardLayer
@ProgrammaticProperty public int getNOut()
getNOut
in class org.deeplearning4j.nn.conf.layers.FeedForwardLayer
public void setNOut(int nOut)
setNOut
in class org.deeplearning4j.nn.conf.layers.FeedForwardLayer
@OptionMetadata(displayName="decay parameter", description="The decay parameter (default = 0.9).", commandLineParamName="decay", commandLineParamSynopsis="-decay <double>", displayOrder=24) public double getDecay()
getDecay
in class org.deeplearning4j.nn.conf.layers.BatchNormalization
public void setDecay(double decay)
setDecay
in class org.deeplearning4j.nn.conf.layers.BatchNormalization
@OptionMetadata(displayName="eps parameter", description="The eps parameter (default = 1e-5).", commandLineParamName="eps", commandLineParamSynopsis="-eps <double>", displayOrder=25) public double getEps()
getEps
in class org.deeplearning4j.nn.conf.layers.BatchNormalization
public void setEps(double eps)
setEps
in class org.deeplearning4j.nn.conf.layers.BatchNormalization
@OptionMetadata(displayName="gamma parameter", description="The gamma parameter (default = 1).", commandLineParamName="gamma", commandLineParamSynopsis="-gamma <double>", displayOrder=26) public double getGamma()
getGamma
in class org.deeplearning4j.nn.conf.layers.BatchNormalization
public void setGamma(double gamma)
setGamma
in class org.deeplearning4j.nn.conf.layers.BatchNormalization
@OptionMetadata(displayName="beta parameter", description="The beta parameter (default = 0).", commandLineParamName="beta", commandLineParamSynopsis="-beta <double>", displayOrder=27) public double getBeta()
getBeta
in class org.deeplearning4j.nn.conf.layers.BatchNormalization
public void setBeta(double beta)
setBeta
in class org.deeplearning4j.nn.conf.layers.BatchNormalization
public java.util.Enumeration<Option> listOptions()
listOptions
in interface OptionHandler
public java.lang.String[] getOptions()
getOptions
in interface OptionHandler
public void setOptions(java.lang.String[] options) throws java.lang.Exception
setOptions
in interface OptionHandler
options
- the list of options as an array of strings
java.lang.Exception
- if an option is not supported