public class OutputLayer extends org.deeplearning4j.nn.conf.layers.OutputLayer implements OptionHandler, java.io.Serializable
Constructor Summary

| Constructor | Description |
|---|---|
| OutputLayer() | Constructor for setting some defaults. |
Method Summary

| Modifier and Type | Method and Description |
|---|---|
| org.nd4j.linalg.activations.IActivation | getActivationFn() |
| org.nd4j.linalg.activations.IActivation | getActivationFunction() |
| double | getAdamMeanDecay() |
| double | getAdamVarDecay() |
| double | getBiasInit() |
| double | getBiasL1() |
| double | getBiasL2() |
| double | getBiasLearningRate() |
| org.deeplearning4j.nn.conf.distribution.Distribution | getDist() |
| double | getDropOut() |
| double | getEpsilon() |
| org.deeplearning4j.nn.conf.GradientNormalization | getGradientNormalization() |
| double | getGradientNormalizationThreshold() |
| double | getL1() |
| double | getL1Bias() |
| double | getL2() |
| double | getL2Bias() |
| java.lang.String | getLayerName() |
| double | getLearningRate() |
| java.util.Map<java.lang.Integer,java.lang.Double> | getLearningRateSchedule() |
| org.nd4j.linalg.lossfunctions.ILossFunction | getLossFn() |
| double | getMomentum() |
| java.util.Map<java.lang.Integer,java.lang.Double> | getMomentumSchedule() |
| int | getNIn() |
| int | getNOut() |
| java.lang.String[] | getOptions() Gets the current settings of the Classifier. |
| double | getRho() |
| double | getRmsDecay() |
| org.deeplearning4j.nn.conf.Updater | getUpdater() |
| org.deeplearning4j.nn.weights.WeightInit | getWeightInit() |
| java.lang.String | globalInfo() Global info. |
| java.util.Enumeration<Option> | listOptions() Returns an enumeration describing the available options. |
| void | setActivationFn(org.nd4j.linalg.activations.IActivation fn) |
| void | setActivationFunction(org.nd4j.linalg.activations.IActivation activationFn) |
| void | setAdamMeanDecay(double adamMeanDecay) |
| void | setAdamVarDecay(double adamVarDecay) |
| void | setBiasInit(double biasInit) |
| void | setBiasL1(double biasL1) |
| void | setBiasL2(double biasL2) |
| void | setBiasLearningRate(double biasLearningRate) |
| void | setDist(org.deeplearning4j.nn.conf.distribution.Distribution dist) |
| void | setDropOut(double dropOut) |
| void | setEpsilon(double epsilon) |
| void | setGradientNormalization(org.deeplearning4j.nn.conf.GradientNormalization gradientNormalization) |
| void | setGradientNormalizationThreshold(double gradientNormalizationThreshold) |
| void | setL1(double l1) |
| void | setL1Bias(int l1bias) |
| void | setL2(double l2) |
| void | setL2Bias(int l2bias) |
| void | setLayerName(java.lang.String layerName) |
| void | setLearningRate(double learningRate) |
| void | setLearningRateSchedule(java.util.Map<java.lang.Integer,java.lang.Double> learningRateSchedule) |
| void | setLossFn(org.nd4j.linalg.lossfunctions.ILossFunction lossFn) |
| void | setMomentum(double momentum) |
| void | setMomentumSchedule(java.util.Map<java.lang.Integer,java.lang.Double> momentumSchedule) |
| void | setNIn(int nIn) |
| void | setNOut(int nOut) |
| void | setOptions(java.lang.String[] options) Parses a given list of options. |
| void | setRho(double rho) |
| void | setRmsDecay(double rmsDecay) |
| void | setUpdater(org.deeplearning4j.nn.conf.Updater updater) |
| void | setWeightInit(org.deeplearning4j.nn.weights.WeightInit weightInit) |
Methods inherited from parent classes:

equals, hashCode, initializer, instantiate, toString

getL1ByParam, getL2ByParam, getLearningRateByParam, getOutputType, getPreProcessorForInputType, setNIn
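Taken together, the constructor and the setters above form the usual programmatic workflow: construct the layer with its defaults, then override individual hyperparameters. The sketch below illustrates that workflow only with methods listed on this page. It assumes the wrapper lives in the weka.dl4j.layers package (the package is not shown here) and that the ND4J implementation classes ActivationSoftmax and LossMCXENT, matching the defaults named in the option descriptions below, are on the classpath.

```java
import org.deeplearning4j.nn.conf.Updater;
import org.deeplearning4j.nn.weights.WeightInit;
import org.nd4j.linalg.activations.impl.ActivationSoftmax;
import org.nd4j.linalg.lossfunctions.impl.LossMCXENT;

import weka.dl4j.layers.OutputLayer; // assumed package (not shown on this page)

public class OutputLayerConfigSketch {
  public static void main(String[] args) {
    OutputLayer out = new OutputLayer();                 // constructor sets the documented defaults
    out.setLayerName("output");                          // equivalent to -name output
    out.setNOut(10);                                     // number of output units (e.g. classes)
    out.setActivationFunction(new ActivationSoftmax());  // softmax is also the documented default
    out.setLossFn(new LossMCXENT());                     // multi-class cross entropy, the default
    out.setWeightInit(WeightInit.XAVIER);                // -weightInit XAVIER
    out.setLearningRate(0.01);                           // -lr 0.01
    out.setUpdater(Updater.NESTEROVS);                   // -updater NESTEROVS
    out.setMomentum(0.9);                                // -momentum 0.9

    System.out.println(out.globalInfo());                // print the layer's global info
  }
}
```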
Method Detail

public java.lang.String globalInfo()
Global info.
@OptionMetadata(displayName="layer name", description="The name of the layer (default = Output Layer).", commandLineParamName="name", commandLineParamSynopsis="-name <string>", displayOrder=0) public java.lang.String getLayerName()
getLayerName
in class org.deeplearning4j.nn.conf.layers.Layer
public void setLayerName(java.lang.String layerName)
setLayerName
in class org.deeplearning4j.nn.conf.layers.Layer
@OptionMetadata(displayName="loss function", description="The loss function to use (default = LossMCXENT).", commandLineParamName="lossFn", commandLineParamSynopsis="-lossFn <specification>", displayOrder=1) public org.nd4j.linalg.lossfunctions.ILossFunction getLossFn()
getLossFn
in class org.deeplearning4j.nn.conf.layers.BaseOutputLayer
public void setLossFn(org.nd4j.linalg.lossfunctions.ILossFunction lossFn)
setLossFn
in class org.deeplearning4j.nn.conf.layers.BaseOutputLayer
@OptionMetadata(displayName="activation function", description="The activation function to use (default = ActivationSoftmax).", commandLineParamName="activation", commandLineParamSynopsis="-activation <specification>", displayOrder=2) public org.nd4j.linalg.activations.IActivation getActivationFunction()
public void setActivationFunction(org.nd4j.linalg.activations.IActivation activationFn)
@ProgrammaticProperty public org.nd4j.linalg.activations.IActivation getActivationFn()
getActivationFn
in class org.deeplearning4j.nn.conf.layers.Layer
public void setActivationFn(org.nd4j.linalg.activations.IActivation fn)
setActivationFn
in class org.deeplearning4j.nn.conf.layers.Layer
@OptionMetadata(displayName="weight initialization method", description="The method for weight initialization (default = XAVIER).", commandLineParamName="weightInit", commandLineParamSynopsis="-weightInit <specification>", displayOrder=3) public org.deeplearning4j.nn.weights.WeightInit getWeightInit()
getWeightInit
in class org.deeplearning4j.nn.conf.layers.Layer
public void setWeightInit(org.deeplearning4j.nn.weights.WeightInit weightInit)
setWeightInit
in class org.deeplearning4j.nn.conf.layers.Layer
@OptionMetadata(displayName="bias initialization", description="The bias initialization (default = 1.0).", commandLineParamName="biasInit", commandLineParamSynopsis="-biasInit <double>", displayOrder=4) public double getBiasInit()
getBiasInit
in class org.deeplearning4j.nn.conf.layers.Layer
public void setBiasInit(double biasInit)
setBiasInit
in class org.deeplearning4j.nn.conf.layers.Layer
@OptionMetadata(displayName="distribution", description="The distribution (default = NormalDistribution(1e-3, 1)).", commandLineParamName="dist", commandLineParamSynopsis="-dist <specification>", displayOrder=5) public org.deeplearning4j.nn.conf.distribution.Distribution getDist()
getDist
in class org.deeplearning4j.nn.conf.layers.Layer
public void setDist(org.deeplearning4j.nn.conf.distribution.Distribution dist)
setDist
in class org.deeplearning4j.nn.conf.layers.Layer
@OptionMetadata(displayName="learning rate", description="The learning rate (default = 0.01).", commandLineParamName="lr", commandLineParamSynopsis="-lr <double>", displayOrder=6) public double getLearningRate()
getLearningRate
in class org.deeplearning4j.nn.conf.layers.Layer
public void setLearningRate(double learningRate)
setLearningRate
in class org.deeplearning4j.nn.conf.layers.Layer
@OptionMetadata(displayName="bias learning rate", description="The bias learning rate (default = 0.01).", commandLineParamName="blr", commandLineParamSynopsis="-blr <double>", displayOrder=7) public double getBiasLearningRate()
getBiasLearningRate
in class org.deeplearning4j.nn.conf.layers.Layer
public void setBiasLearningRate(double biasLearningRate)
setBiasLearningRate
in class org.deeplearning4j.nn.conf.layers.Layer
@OptionMetadata(displayName="learning rate schedule", description="The learning rate schedule.", commandLineParamName="lrSchedule", commandLineParamSynopsis="-lrSchedule <specification>", displayOrder=8) public java.util.Map<java.lang.Integer,java.lang.Double> getLearningRateSchedule()
getLearningRateSchedule
in class org.deeplearning4j.nn.conf.layers.Layer
public void setLearningRateSchedule(java.util.Map<java.lang.Integer,java.lang.Double> learningRateSchedule)
setLearningRateSchedule
in class org.deeplearning4j.nn.conf.layers.Layer
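The schedule accessors above take a java.util.Map from Integer step to Double rate. A minimal sketch of building such a schedule follows; whether the keys are interpreted as iterations or epochs is governed by the enclosing network configuration rather than by this layer, so the key values used here are purely illustrative.

```java
import java.util.HashMap;
import java.util.Map;

import weka.dl4j.layers.OutputLayer; // assumed package, as above

public final class LearningRateScheduleSketch {

  // Hypothetical helper: attaches a step-wise learning rate schedule to the layer.
  static void applySchedule(OutputLayer out) {
    Map<Integer, Double> schedule = new HashMap<>();
    schedule.put(0, 0.01);     // start at the default learning rate
    schedule.put(1000, 0.005); // drop the rate after 1000 steps (illustrative)
    schedule.put(5000, 0.001);
    out.setLearningRateSchedule(schedule);
  }
}
```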
@OptionMetadata(displayName="momentum", description="The momentum (default = 0.9).", commandLineParamName="momentum", commandLineParamSynopsis="-momentum <double>", displayOrder=9) public double getMomentum()
getMomentum
in class org.deeplearning4j.nn.conf.layers.Layer
public void setMomentum(double momentum)
setMomentum
in class org.deeplearning4j.nn.conf.layers.Layer
@OptionMetadata(displayName="momentum schedule", description="The momentum schedule.", commandLineParamName="momentumSchedule", commandLineParamSynopsis="-momentumSchedule <specification>", displayOrder=10) public java.util.Map<java.lang.Integer,java.lang.Double> getMomentumSchedule()
getMomentumSchedule
in class org.deeplearning4j.nn.conf.layers.Layer
public void setMomentumSchedule(java.util.Map<java.lang.Integer,java.lang.Double> momentumSchedule)
setMomentumSchedule
in class org.deeplearning4j.nn.conf.layers.Layer
@OptionMetadata(displayName="L1", description="The L1 parameter (default = 0).", commandLineParamName="L1", commandLineParamSynopsis="-L1 <double>", displayOrder=11) public double getL1()
getL1
in class org.deeplearning4j.nn.conf.layers.Layer
public void setL1(double l1)
setL1
in class org.deeplearning4j.nn.conf.layers.Layer
@OptionMetadata(displayName="L2", description="The L2 parameter (default = 0).", commandLineParamName="L2", commandLineParamSynopsis="-L2 <double>", displayOrder=12) public double getL2()
getL2
in class org.deeplearning4j.nn.conf.layers.Layer
public void setL2(double l2)
setL2
in class org.deeplearning4j.nn.conf.layers.Layer
@OptionMetadata(displayName="L1 bias", description="The L1 bias parameter (default = 0).", commandLineParamName="l1Bias", commandLineParamSynopsis="-l1Bias <double>", displayOrder=13) public double getBiasL1()
public void setBiasL1(double biasL1)
@OptionMetadata(displayName="L2 bias", description="The L2 bias parameter (default = 0).", commandLineParamName="l2Bias", commandLineParamSynopsis="-l2Bias <double>", displayOrder=14) public double getBiasL2()
public void setBiasL2(double biasL2)
@OptionMetadata(displayName="dropout parameter", description="The dropout parameter (default = 0).", commandLineParamName="dropout", commandLineParamSynopsis="-dropout <double>", displayOrder=15) public double getDropOut()
getDropOut
in class org.deeplearning4j.nn.conf.layers.Layer
public void setDropOut(double dropOut)
setDropOut
in class org.deeplearning4j.nn.conf.layers.Layer
@OptionMetadata(displayName="updater for stochastic gradient descent", description="The updater for stochastic gradient descent (default NESTEROVS).", commandLineParamName="updater", commandLineParamSynopsis="-updater <speficiation>", displayOrder=16) public org.deeplearning4j.nn.conf.Updater getUpdater()
getUpdater
in class org.deeplearning4j.nn.conf.layers.Layer
public void setUpdater(org.deeplearning4j.nn.conf.Updater updater)
setUpdater
in class org.deeplearning4j.nn.conf.layers.Layer
@OptionMetadata(displayName="ADADELTA\'s rho parameter", description="ADADELTA\'s rho parameter (default = 0).", commandLineParamName="rho", commandLineParamSynopsis="-rho <double>", displayOrder=17) public double getRho()
getRho
in class org.deeplearning4j.nn.conf.layers.Layer
public void setRho(double rho)
setRho
in class org.deeplearning4j.nn.conf.layers.Layer
@OptionMetadata(displayName="ADADELTA\'s epsilon parameter", description="ADADELTA\'s epsilon parameter (default = 1e-6).", commandLineParamName="epsilon", commandLineParamSynopsis="-epsilon <double>", displayOrder=18) public double getEpsilon()
getEpsilon
in class org.deeplearning4j.nn.conf.layers.Layer
public void setEpsilon(double epsilon)
setEpsilon
in class org.deeplearning4j.nn.conf.layers.Layer
@OptionMetadata(displayName="RMSPROP\'s RMS decay parameter", description="RMSPROP\'s RMS decay parameter (default = 0.95).", commandLineParamName="rmsDecay", commandLineParamSynopsis="-rmsDecay <double>", displayOrder=19) public double getRmsDecay()
getRmsDecay
in class org.deeplearning4j.nn.conf.layers.Layer
public void setRmsDecay(double rmsDecay)
setRmsDecay
in class org.deeplearning4j.nn.conf.layers.Layer
@OptionMetadata(displayName="ADAM\'s mean decay parameter", description="ADAM\'s mean decay parameter (default 0.9).", commandLineParamName="adamMeanDecay", commandLineParamSynopsis="-adamMeanDecay <double>", displayOrder=20) public double getAdamMeanDecay()
getAdamMeanDecay
in class org.deeplearning4j.nn.conf.layers.Layer
public void setAdamMeanDecay(double adamMeanDecay)
setAdamMeanDecay
in class org.deeplearning4j.nn.conf.layers.Layer
@OptionMetadata(displayName="ADAMS\'s var decay parameter", description="ADAM\'s var decay parameter (default 0.999).", commandLineParamName="adamVarDecay", commandLineParamSynopsis="-adamVarDecay <double>", displayOrder=21) public double getAdamVarDecay()
getAdamVarDecay
in class org.deeplearning4j.nn.conf.layers.Layer
public void setAdamVarDecay(double adamVarDecay)
setAdamVarDecay
in class org.deeplearning4j.nn.conf.layers.Layer
@OptionMetadata(displayName="gradient normalization method", description="The gradient normalization method (default = None).", commandLineParamName="gradientNormalization", commandLineParamSynopsis="-gradientNormalization <specification>", displayOrder=22) public org.deeplearning4j.nn.conf.GradientNormalization getGradientNormalization()
getGradientNormalization
in class org.deeplearning4j.nn.conf.layers.Layer
public void setGradientNormalization(org.deeplearning4j.nn.conf.GradientNormalization gradientNormalization)
setGradientNormalization
in class org.deeplearning4j.nn.conf.layers.Layer
@OptionMetadata(displayName="gradient normalization threshold", description="The gradient normalization threshold (default = 1).", commandLineParamName="gradNormThreshold", commandLineParamSynopsis="-gradNormThreshold <double>", displayOrder=23) public double getGradientNormalizationThreshold()
getGradientNormalizationThreshold
in class org.deeplearning4j.nn.conf.layers.Layer
public void setGradientNormalizationThreshold(double gradientNormalizationThreshold)
setGradientNormalizationThreshold
in class org.deeplearning4j.nn.conf.layers.Layer
@ProgrammaticProperty public int getNIn()
getNIn
in class org.deeplearning4j.nn.conf.layers.FeedForwardLayer
public void setNIn(int nIn)
setNIn
in class org.deeplearning4j.nn.conf.layers.FeedForwardLayer
@ProgrammaticProperty public int getNOut()
getNOut
in class org.deeplearning4j.nn.conf.layers.FeedForwardLayer
public void setNOut(int nOut)
setNOut
in class org.deeplearning4j.nn.conf.layers.FeedForwardLayer
@ProgrammaticProperty public double getL1Bias()
getL1Bias
in class org.deeplearning4j.nn.conf.layers.Layer
public void setL1Bias(int l1bias)
@ProgrammaticProperty public double getL2Bias()
getL2Bias
in class org.deeplearning4j.nn.conf.layers.Layer
public void setL2Bias(int l2bias)
public java.util.Enumeration<Option> listOptions()
Returns an enumeration describing the available options.
Specified by: listOptions in interface OptionHandler

public java.lang.String[] getOptions()
Gets the current settings of the Classifier.
Specified by: getOptions in interface OptionHandler

public void setOptions(java.lang.String[] options) throws java.lang.Exception
Parses a given list of options.
Specified by: setOptions in interface OptionHandler
Parameters:
options - the list of options as an array of strings
Throws:
java.lang.Exception - if an option is not supported
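Because the class implements OptionHandler, the same hyperparameters can also be set from an option string using the command-line flags documented above. The sketch below uses weka.core.Utils.splitOptions to tokenize the string and only flags that appear on this page; as before, the wrapper's package is assumed to be weka.dl4j.layers.

```java
import weka.core.Utils;

import weka.dl4j.layers.OutputLayer; // assumed package, as above

public final class OptionHandlerSketch {
  public static void main(String[] args) throws Exception {
    OutputLayer out = new OutputLayer();

    // Flags taken from the commandLineParamSynopsis values documented above.
    String[] options = Utils.splitOptions("-name output -lr 0.005 -momentum 0.9 -dropout 0.5");
    out.setOptions(options); // throws java.lang.Exception if a flag is not supported

    // Echo the effective settings back as a single option string.
    System.out.println(Utils.joinOptions(out.getOptions()));
  }
}
```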