function layer = gruLayer(varargin)
% gruLayer   Create a gated recurrent unit (GRU) layer.
%   NOTE(review): reconstructed from machine-translation-corrupted source;
%   the function header and help text preceded this chunk — confirm the
%   signature against the shipping gruLayer.m.

% Gather any GPU-resident parameter values back to host memory before parsing.
varargin = nnet.internal.cnn.layer.util.gatherParametersToCPU(varargin);
args = nnet.cnn.layer.GRULayer.parseInputArguments(varargin{:});

% Build the internal layer implementation.
% NOTE(review): the argument lines between Name and OutputMode were lost in
% the corrupted source — verify against nnet.internal.cnn.layer.GRU's
% constructor signature.
internalLayer = nnet.internal.cnn.layer.GRU(args.Name, ...
    args.InputSize, ...
    args.NumHiddenUnits, ...
    iGetReturnSequence(args.OutputMode), ...
    args.StateActivationFunction, ...
    args.GateActivationFunction, ...
    args.ResetGateMode);

% Wrap in the user-visible layer and copy the parsed parameters across.
layer = nnet.cnn.layer.GRULayer(internalLayer);
layer.InputWeights                    = args.InputWeights;
layer.InputWeightsL2Factor            = args.InputWeightsL2Factor;
layer.InputWeightsLearnRateFactor     = args.InputWeightsLearnRateFactor;
layer.InputWeightsInitializer         = args.InputWeightsInitializer;
layer.RecurrentWeights                = args.RecurrentWeights;
layer.RecurrentWeightsL2Factor        = args.RecurrentWeightsL2Factor;
layer.RecurrentWeightsLearnRateFactor = args.RecurrentWeightsLearnRateFactor;
layer.RecurrentWeightsInitializer     = args.RecurrentWeightsInitializer;
layer.Bias                            = args.Bias;  % NOTE(review): Bias-value line missing from the corrupted source
layer.BiasL2Factor                    = args.BiasL2Factor;
layer.BiasLearnRateFactor             = args.BiasLearnRateFactor;
layer.BiasInitializer                 = args.BiasInitializer;
layer.HiddenState                     = args.HiddenState;
end
function tf = iGetReturnSequence(mode)
% Map the user-facing OutputMode string onto the internal ReturnSequence
% flag: 'sequence' -> true, 'last' -> false.
% NOTE(review): body line lost in corrupted source — reconstructed as the
% inverse of iGetOutputMode.
tf = strcmp(mode, 'sequence');
end
function [h, hFinal] = gruForwardGeneral(X, learnables, state, options)
% General (host) GRU forward pass over a C-by-N-by-T input X.
%   h      - numHidden-by-N-by-T hidden-state sequence
%   hFinal - hidden state at the final time step
% NOTE(review): reconstructed from corrupted source. The weight-unpacking
% lines and the time-step loop header were lost; the stacked-gate layout
% (reset/update gates concatenated, candidate state separate) is inferred
% from the surviving update equations — confirm against the original.
% Assumes ResetGateMode 'after-multiplication' (r applied to Rh*hPrev).

numHidden = size(learnables.RecurrentWeights, 2);
N = size(X, 2);
T = size(X, 3);

% Row indices of the stacked gate parameters.
% NOTE(review): output list of gruGateIndices was garbled — verify.
[rzInd, hInd] = nnet.internal.cnn.util.gruGateIndices(numHidden);

Wrz = learnables.InputWeights(rzInd, :);
Wh  = learnables.InputWeights(hInd, :);
Rrz = learnables.RecurrentWeights(rzInd, :);
Rh  = learnables.RecurrentWeights(hInd, :);
brz = learnables.Bias(rzInd);
bh  = learnables.Bias(hInd);

% Output buffer with the same type/device as X.
h = zeros(numHidden, N, T, 'like', X);

% Resolve the state (candidate) activation: named built-in or user handle.
if isstring(options.StateActivationFunction) || ischar(options.StateActivationFunction)
    stateActivationFunction = iGetStateActivation(options.StateActivationFunction);
elseif isa(options.StateActivationFunction, 'function_handle')
    stateActivationFunction = options.StateActivationFunction;
end

% Resolve the gate activation the same way.
if isstring(options.GateActivationFunction) || ischar(options.GateActivationFunction)
    gateActivationFunction = iGetGateActivation(options.GateActivationFunction);
elseif isa(options.GateActivationFunction, 'function_handle')
    gateActivationFunction = options.GateActivationFunction;
end

% First time step, driven by the initial hidden state h0.
h0 = state;
rz = gateActivationFunction(Wrz*X(:,:,1) + Rrz*h0 + brz);
r  = rz(1:numHidden, :);
z  = rz(numHidden+1:end, :);
hs = stateActivationFunction(Wh*X(:,:,1) + r.*(Rh*h0) + bh);
h(:,:,1) = (1 - z).*hs + z.*h0;

% Remaining time steps recur on the previous output slice.
for tt = 2:T
    hPrev = h(:,:,tt-1);
    rz = gateActivationFunction(Wrz*X(:,:,tt) + Rrz*hPrev + brz);
    r  = rz(1:numHidden, :);
    z  = rz(numHidden+1:end, :);
    hs = stateActivationFunction(Wh*X(:,:,tt) + r.*(Rh*hPrev) + bh);
    h(:,:,tt) = (1 - z).*hs + z.*hPrev;
end

hFinal = h(:,:,T);  % NOTE(review): second output name was garbled to 'h' — confirm
end
function act = iGetStateActivation(activation)
% Map a state-activation name onto its host implementation.
% Valid names are 'tanh' and 'softsign' (see
% iAssertAndReturnValidStateActivation); only the tanh branch survived in
% the corrupted source — the softsign mapping is reconstructed.
switch activation
    case 'tanh'
        act = @nnet.internal.cnnhost.tanhForward;
    case 'softsign'
        act = @nnet.internal.cnnhost.softsignForward;  % NOTE(review): confirm function name
    otherwise
        error('Unsupported state activation: %s', activation);
end
end
function act = iGetGateActivation(activation)
% Map a gate-activation name onto its host implementation.
% NOTE(review): the switch/case structure was lost in the corrupted source;
% the surviving handles were sigmoidForward, hardSigmoidForward and
% tanhForward. 'radbasn' is also accepted by
% iAssertAndReturnValidGateActivation — its mapping was lost; confirm.
switch activation
    case 'sigmoid'
        act = @nnet.internal.cnnhost.sigmoidForward;
    case 'hard-sigmoid'
        act = @nnet.internal.cnnhost.hardSigmoidForward;
    case 'tanh'
        act = @nnet.internal.cnnhost.tanhForward;
    otherwise
        error('Unsupported gate activation: %s', activation);
end
end
classdef GRULayer < nnet.cnn.layer.Layer & nnet.internal.cnn.layer.Externalizable
    % GRULayer   Gated recurrent unit (GRU) layer.
    %   NOTE(review): reconstructed from corrupted source; the property
    %   block's attributes and any declarations beyond those listed below
    %   were lost — confirm against the shipping GRULayer.m.

    properties (Dependent)
        % Learn-rate and L2 factors may be a scalar or a per-gate vector;
        % iCheckFactorDimensions enforces numel 1 or 3.
        InputWeightsLearnRateFactor (1,:) {mustBeNumeric, iCheckFactorDimensions}
        InputWeightsL2Factor (1,:) {mustBeNumeric, iCheckFactorDimensions}
        RecurrentWeightsInitializer
        RecurrentWeightsLearnRateFactor (1,:) {mustBeNumeric, iCheckFactorDimensions}
        RecurrentWeightsL2Factor (1,:) {mustBeNumeric, iCheckFactorDimensions}
        BiasLearnRateFactor (1,:) {mustBeNumeric, iCheckFactorDimensions}
        BiasL2Factor (1,:) {mustBeNumeric, iCheckFactorDimensions}
    end
function this = GRULayer(privateLayer)
    % Wrap an internal GRU layer implementation.
    this.PrivateLayer = privateLayer;
end

function val = get.Name(this)
    % NOTE(review): getter header lost in corrupted source — reconstructed.
    val = this.PrivateLayer.Name;
end

function this = set.Name(this, val)
    iAssertValidLayerName(val);
    this.PrivateLayer.Name = char(val);
end

function val = get.InputSize(this)
    val = this.PrivateLayer.InputSize;
    % NOTE(review): set.InputWeights compares InputSize against 'auto', so
    % this getter likely mapped an empty internal size to 'auto'; that
    % branch was lost in the corrupted source — confirm.
    if isempty(val)
        val = 'auto';
    end
end
function val = get.NumHiddenUnits(this)
    % Hidden-state size of the internal layer.
    val = this.PrivateLayer.HiddenSize;
end

function val = get.OutputMode(this)
    % Convert the internal ReturnSequence flag to 'sequence'/'last'.
    val = iGetOutputMode(this.PrivateLayer.ReturnSequence);
end

function val = get.StateActivationFunction(this)
    val = this.PrivateLayer.Activation;
end

function val = get.GateActivationFunction(this)
    val = this.PrivateLayer.RecurrentActivation;
end

function val = get.InputWeights(this)
    % Host-side copy of the input weights.
    val = this.PrivateLayer.InputWeights.HostValue;
end

function val = get.ResetGateMode(this)
    val = this.PrivateLayer.ResetGateMode;
end
function this = set.InputWeights(this, value)
    % Validate and store user-supplied input weights. When InputSize is
    % 'auto', the input size is inferred from the weight column count.
    if isequal(this.InputSize, 'auto')
        % NOTE(review): this branch's body was lost — NaN lets any column
        % count pass size validation; confirm.
        expectedInputSize = NaN;
    else
        expectedInputSize = this.InputSize;
    end
    attributes = {'size', [3*this.NumHiddenUnits expectedInputSize], ...
        'real', 'nonsparse'};  % NOTE(review): trailing attribute entries lost — confirm
    value = iGatherAndValidateParameter(value, attributes);
    if isequal(this.InputSize, 'auto') && ~isempty(value)
        % Configure the internal layer for the now-known input size using a
        % size-only placeholder with 'CBT' (channel, batch, time) format.
        this.PrivateLayer = this.PrivateLayer.configureForInputs( ...
            {iMakeSizeOnlyArray([size(value,2) NaN NaN], 'CBT')});
    end
    this.PrivateLayer.InputWeights.Value = value;
end

function val = get.InputWeightsInitializer(this)
    % Return the custom handle when set, otherwise the initializer name.
    if iIsCustomInitializer(this.PrivateLayer.InputWeights.Initializer)
        val = this.PrivateLayer.InputWeights.Initializer.Fcn;
    else
        val = this.PrivateLayer.InputWeights.Initializer.Name;
    end
end

function this = set.InputWeightsInitializer(this, value)
    value = iAssertValidWeightsInitializer(value, 'InputWeightsInitializer');
    this.PrivateLayer.InputWeights.Initializer = ...
        iInitializerFactory(value, 2, 1);
end
function val = get.RecurrentWeights(this)
    % Host-side copy of the recurrent weights.
    val = this.PrivateLayer.RecurrentWeights.HostValue;
end

function this = set.RecurrentWeights(this, value)
    % Recurrent weights must be (3*NumHiddenUnits)-by-NumHiddenUnits.
    attributes = {'size', [3*this.NumHiddenUnits this.NumHiddenUnits], ...
        'real', 'nonsparse'};  % NOTE(review): trailing attribute entries lost — confirm
    value = iGatherAndValidateParameter(value, attributes);
    this.PrivateLayer.RecurrentWeights.Value = value;
end

function val = get.RecurrentWeightsInitializer(this)
    % Return the custom handle when set, otherwise the initializer name.
    if iIsCustomInitializer(this.PrivateLayer.RecurrentWeights.Initializer)
        val = this.PrivateLayer.RecurrentWeights.Initializer.Fcn;
    else
        val = this.PrivateLayer.RecurrentWeights.Initializer.Name;
    end
end

function this = set.RecurrentWeightsInitializer(this, value)
    value = iAssertValidWeightsInitializer(value, 'RecurrentWeightsInitializer');
    this.PrivateLayer.RecurrentWeights.Initializer = ...
        iInitializerFactory(value, 2, 1);
end
function val = get.Bias(this)
    % NOTE(review): getter header lost in corrupted source — reconstructed.
    val = this.PrivateLayer.Bias.HostValue;
end

function this = set.Bias(this, value)
    % 'recurrent-bias-after-multiplication' carries two stacked bias
    % vectors, doubling the expected row count.
    biasNRowFactor = 1 + double(isequal(this.ResetGateMode, ...
        'recurrent-bias-after-multiplication'));
    attributes = {'column', 'real', 'nonsparse'};
    value = iGatherAndValidateParameter(value, attributes);
    expectedSize = 3 * biasNRowFactor * this.NumHiddenUnits;
    if length(value) ~= expectedSize && ~isequal(value, [])
        % NOTE(review): message argument list partially lost — confirm.
        error(message('nnet_cnn:layer:GRULayer:BiasSize', ...
            3 * biasNRowFactor, this.ResetGateMode));
    end
    this.PrivateLayer.Bias.Value = value;
end

function val = get.BiasInitializer(this)
    % Return the custom handle when set, otherwise the initializer name.
    if iIsCustomInitializer(this.PrivateLayer.Bias.Initializer)
        val = this.PrivateLayer.Bias.Initializer.Fcn;
    else
        val = this.PrivateLayer.Bias.Initializer.Name;
    end
end

function this = set.BiasInitializer(this, value)
    value = iAssertValidBiasInitializer(value);
    this.PrivateLayer.Bias.Initializer = iInitializerFactory(value, ...
        1);  % NOTE(review): trailing factory arguments lost — confirm
end
function val = get.HiddenState(this)
    % Gather from GPU (if needed) so the user always sees host data.
    val = gather(this.PrivateLayer.HiddenState.Value);
end

function this = set.HiddenState(this, value)
    % Setting the hidden state also resets the initial hidden state.
    % NOTE(review): the literal 'default' is preserved from the corrupted
    % source — confirm the validator's middle argument.
    value = iGatherAndValidateParameter(value, 'default', [this.NumHiddenUnits 1]);
    this.PrivateLayer.InitialHiddenState = value;
    this.PrivateLayer.HiddenState.Value = value;
end
% Learn-rate / L2 factor accessors: user values are expanded to per-unit
% storage on set (setFactor) and collapsed back on get (getFactor).

function val = get.InputWeightsLearnRateFactor(this)
    val = this.getFactor(this.PrivateLayer.InputWeights.LearnRateFactor);
end

function this = set.InputWeightsLearnRateFactor(this, val)
    this.PrivateLayer.InputWeights.LearnRateFactor = this.setFactor(val);
end

function val = get.InputWeightsL2Factor(this)
    val = this.getFactor(this.PrivateLayer.InputWeights.L2Factor);
end

function this = set.InputWeightsL2Factor(this, val)
    this.PrivateLayer.InputWeights.L2Factor = this.setFactor(val);
end

function val = get.RecurrentWeightsLearnRateFactor(this)
    val = this.getFactor(this.PrivateLayer.RecurrentWeights.LearnRateFactor);
end

function this = set.RecurrentWeightsLearnRateFactor(this, val)
    this.PrivateLayer.RecurrentWeights.LearnRateFactor = this.setFactor(val);
end

function val = get.RecurrentWeightsL2Factor(this)
    val = this.getFactor(this.PrivateLayer.RecurrentWeights.L2Factor);
end

function this = set.RecurrentWeightsL2Factor(this, val)
    this.PrivateLayer.RecurrentWeights.L2Factor = this.setFactor(val);
end

function val = get.BiasLearnRateFactor(this)
    val = this.getFactor(this.PrivateLayer.Bias.LearnRateFactor);
end

function this = set.BiasLearnRateFactor(this, val)
    this.PrivateLayer.Bias.LearnRateFactor = this.setFactor(val);
end

function val = get.BiasL2Factor(this)
    val = this.getFactor(this.PrivateLayer.Bias.L2Factor);
end

function this = set.BiasL2Factor(this, val)
    this.PrivateLayer.Bias.L2Factor = this.setFactor(val);
end
function val = get.OutputSize(this)
    % The layer outputs one value per hidden unit.
    val = this.NumHiddenUnits;
end

function out = saveobj(this)
    % Serialize the layer to a plain struct for MAT-file storage.
    % NOTE(review): header garbled in corrupted source (surviving text reads
    % "get.OutputState"), but the out.* body matches a saveobj
    % implementation consumed by loadobj — confirm the method name and
    % whether a Version field was also written.
    privateLayer = this.PrivateLayer;
    out.Name = privateLayer.Name;
    out.InputSize = privateLayer.InputSize;
    out.NumHiddenUnits = privateLayer.HiddenSize;
    out.ReturnSequence = privateLayer.ReturnSequence;
    out.ResetGateMode = privateLayer.ResetGateMode;
    out.StateActivationFunction = privateLayer.Activation;
    out.GateActivationFunction = privateLayer.RecurrentActivation;
    out.InputWeights = toStruct(privateLayer.InputWeights);
    out.RecurrentWeights = toStruct(privateLayer.RecurrentWeights);
    out.Bias = toStruct(privateLayer.Bias);
    out.HiddenState = toStruct(privateLayer.HiddenState);
    out.InitialHiddenState = gather(privateLayer.InitialHiddenState);
end
function inputArguments = parseInputArguments(varargin)
    % Parse gruLayer's required/name-value arguments into a canonical
    % struct. InputSize starts empty ('auto') and is inferred later.
    % NOTE(review): the parser-construction line was lost — confirm the
    % helper name.
    parser = iCreateParser();
    parser.parse(varargin{:});
    inputArguments = iConvertToCanonicalForm(parser);
    inputArguments.InputSize = [];
end

function this = loadobj(in)
    % Rebuild a GRULayer from its saveobj struct.
    % NOTE(review): constructor argument lines were partially lost in the
    % corrupted source — confirm against nnet.internal.cnn.layer.GRU.
    internalLayer = nnet.internal.cnn.layer.GRU(in.Name, ...
        in.InputSize, ...
        in.NumHiddenUnits, ...
        in.ReturnSequence, ...
        in.StateActivationFunction, ...
        in.GateActivationFunction, ...
        in.ResetGateMode);
    internalLayer.InputWeights = nnet.internal.cnn.layer.learnable.PredictionLearnableParameter.fromStruct(in.InputWeights);
    internalLayer.RecurrentWeights = nnet.internal.cnn.layer.learnable.PredictionLearnableParameter.fromStruct(in.RecurrentWeights);
    internalLayer.Bias = nnet.internal.cnn.layer.learnable.PredictionLearnableParameter.fromStruct(in.Bias);
    internalLayer.HiddenState = nnet.internal.cnn.layer.dynamic.TrainingDynamicParameter.fromStruct(in.HiddenState);
    internalLayer.InitialHiddenState = in.InitialHiddenState;
    this = nnet.cnn.layer.GRULayer(internalLayer);
end
function [description, type] = getOneLineDisplay(obj)
    % One-line summary shown in layer arrays.
    description = iGetMessageString( ...
        'nnet_cnn:layer:GRULayer:oneLineDisplay', ...
        num2str(obj.NumHiddenUnits));
    type = iGetMessageString('nnet_cnn:layer:GRULayer:Type');
end

function groups = getPropertyGroups(this)
    % Property groups for the custom display.
    % NOTE(review): several list entries and the group concatenation were
    % lost in the corrupted source — entries beyond those that survived
    % are reconstructed; confirm.
    generalParameters = {'Name'};
    hyperParameters = {'InputSize', ...
        'NumHiddenUnits', ...
        'OutputMode', ...
        'StateActivationFunction', ...
        'GateActivationFunction', ...
        'ResetGateMode'};
    learnableParameters = {'InputWeights', ...
        'RecurrentWeights', ...
        'Bias'};
    stateParameters = {'HiddenState'};
    groups = [ ...
        this.propertyGroupGeneral(generalParameters)
        this.propertyGroupHyperparameters(hyperParameters)
        this.propertyGroupLearnableParameters(learnableParameters)
        this.propertyGroupDynamicParameters(stateParameters)
        ];
end

function footer = getFooter(this)
    % "Show all properties" footer for the custom display.
    % NOTE(review): header lost — the inputname/footer body matches a
    % getFooter implementation; confirm.
    variableName = inputname(1);
    footer = this.createShowAllPropertiesFooter(variableName);
end

function val = getFactor(this, val)
    % Present a stored per-unit factor in user form.
    % NOTE(review): surrounding branches were lost — only the elseif that
    % subsamples one entry per gate survived; reconstructed.
    if isscalar(val)
        % Already in user form.
    elseif numel(val) == (3*this.NumHiddenUnits)
        % Stored expanded per hidden unit: take one entry per gate.
        val = val(1:this.NumHiddenUnits:end);
    end
end

function val = setFactor(this, val)
    % Expand a per-gate factor so each hidden-unit row carries its gate's
    % factor; scalars are stored unchanged.
    % NOTE(review): guard branch lost — reconstructed.
    if isscalar(val)
        return
    end
    expandedValues = repelem(val, this.NumHiddenUnits);
    val = expandedValues(:);
end
function messageString = iGetMessageString(varargin)
% Look up a translated message string from the message catalog.
messageString = getString(message(varargin{:}));
end
function p = iCreateParser()
% Build the inputParser describing gruLayer's required argument and
% name-value pairs.
% NOTE(review): the function header and several default definitions
% (Name, OutputMode, BiasL2Factor, learnable/state defaults) were lost in
% the corrupted source and are reconstructed — confirm against the
% documented gruLayer defaults.
p = inputParser;

defaultName = '';
defaultOutputMode = 'sequence';
defaultStateActivationFunction = 'tanh';
defaultGateActivationFunction = 'sigmoid';
defaultWeightLearnRateFactor = 1;
defaultBiasLearnRateFactor = 1;
defaultWeightL2Factor = 1;
defaultBiasL2Factor = 0;  % NOTE(review): documented bias L2 default is 0 — confirm
defaultInputWeightsInitializer = 'glorot';
defaultRecurrentWeightsInitializer = 'orthogonal';
defaultBiasInitializer = 'zeros';
defaultResetGateMode = 'after-multiplication';
defaultLearnable = [];
defaultState = [];

p.addRequired('NumHiddenUnits', @(x) validateattributes(x, {'numeric'}, {'scalar', 'positive', 'integer'}));
p.addParameter('Name', defaultName, @nnet.internal.cnn.layer.paramvalidation.validateLayerName);
p.addParameter('OutputMode', defaultOutputMode, @(x) any(iAssertAndReturnValidOutputMode(x)));
p.addParameter('StateActivationFunction', defaultStateActivationFunction, @(x) any(iAssertAndReturnValidStateActivation(x)));
p.addParameter('GateActivationFunction', defaultGateActivationFunction, @(x) any(iAssertAndReturnValidGateActivation(x)));
p.addParameter('InputWeightsLearnRateFactor', defaultWeightLearnRateFactor, @(x) iAssertValidFactor(x));
p.addParameter('RecurrentWeightsLearnRateFactor', defaultWeightLearnRateFactor, @(x) iAssertValidFactor(x));
p.addParameter('BiasLearnRateFactor', defaultBiasLearnRateFactor, @(x) iAssertValidFactor(x));
p.addParameter('InputWeightsL2Factor', defaultWeightL2Factor, @(x) iAssertValidFactor(x));
p.addParameter('RecurrentWeightsL2Factor', defaultWeightL2Factor, @(x) iAssertValidFactor(x));
p.addParameter('BiasL2Factor', defaultBiasL2Factor, @(x) iAssertValidFactor(x));
p.addParameter('InputWeightsInitializer', defaultInputWeightsInitializer);
p.addParameter('RecurrentWeightsInitializer', defaultRecurrentWeightsInitializer);
p.addParameter('BiasInitializer', defaultBiasInitializer);
p.addParameter('InputWeights', defaultLearnable);
p.addParameter('RecurrentWeights', defaultLearnable);
p.addParameter('Bias', defaultLearnable);
p.addParameter('HiddenState', defaultState);
p.addParameter('ResetGateMode', defaultResetGateMode, @(x) any(iAssertAndReturnValidResetGateMode(x)));
end
function inputArguments = iConvertToCanonicalForm(parser)
% Flatten inputParser results into a plain struct with canonical types
% (chars for names, doubles for sizes, validated option strings).
results = parser.Results;
inputArguments = struct;
inputArguments.NumHiddenUnits = double(results.NumHiddenUnits);
inputArguments.Name = convertStringsToChars(results.Name);
inputArguments.OutputMode = iAssertAndReturnValidOutputMode(results.OutputMode);
inputArguments.StateActivationFunction = iAssertAndReturnValidStateActivation(convertStringsToChars(results.StateActivationFunction));
inputArguments.GateActivationFunction = iAssertAndReturnValidGateActivation(convertStringsToChars(results.GateActivationFunction));
inputArguments.InputWeightsLearnRateFactor = results.InputWeightsLearnRateFactor;
inputArguments.RecurrentWeightsLearnRateFactor = results.RecurrentWeightsLearnRateFactor;
inputArguments.BiasLearnRateFactor = results.BiasLearnRateFactor;
inputArguments.InputWeightsL2Factor = results.InputWeightsL2Factor;
inputArguments.RecurrentWeightsL2Factor = results.RecurrentWeightsL2Factor;
inputArguments.BiasL2Factor = results.BiasL2Factor;
inputArguments.InputWeightsInitializer = results.InputWeightsInitializer;
inputArguments.RecurrentWeightsInitializer = results.RecurrentWeightsInitializer;
inputArguments.BiasInitializer = results.BiasInitializer;
inputArguments.InputWeights = results.InputWeights;
inputArguments.RecurrentWeights = results.RecurrentWeights;
inputArguments.Bias = results.Bias;
inputArguments.HiddenState = results.HiddenState;
inputArguments.ResetGateMode = iAssertAndReturnValidResetGateMode(results.ResetGateMode);
end
function mode = iGetOutputMode(tf)
% Inverse of iGetReturnSequence: true -> 'sequence', false -> 'last'.
% NOTE(review): body lines lost in corrupted source — reconstructed.
if tf
    mode = 'sequence';
else
    mode = 'last';
end
end
function iCheckFactorDimensions(value)
% Property validator: a learn-rate/L2 factor must be a scalar or have one
% entry per gate (3).
% NOTE(review): the line computing dim and the throw were lost in the
% corrupted source — reconstructed.
dim = numel(value);
if ~(dim == 1 || dim == 3)
    exception = MException(message('nnet_cnn:layer:GRULayer:InvalidFactor'));
    throwAsCaller(exception);
end
end
function validString = iAssertAndReturnValidOutputMode(value)
% Validate OutputMode, allowing unambiguous partial matches.
validString = validatestring(value, {'sequence', 'last'});
end
function validString = iAssertAndReturnValidStateActivation(value)
% Validate the state (candidate) activation name.
validString = validatestring(value, {'tanh', 'softsign'});
end
function validString = iAssertAndReturnValidGateActivation(value)
% Validate the gate activation name.
validString = validatestring(value, {'sigmoid', 'tanh', 'hard-sigmoid', 'radbasn'});
end
function iAssertValidFactor(value)
% Validate a learn-rate/L2 factor value.
% NOTE(review): header lost in corrupted source — name reconstructed from
% the parser's @(x) iAssertValidFactor(x) usage.
validateattributes(value, {'numeric'}, {'vector', 'real', 'nonnegative', 'finite'});
end
function value = iAssertValidWeightsInitializer(value, name)
% Accept a named weights initializer or a custom function handle.
validateattributes(value, {'function_handle', 'char', 'string'}, {});
if (ischar(value) || isstring(value))
    % NOTE(review): list continuation lost in corrupted source — the
    % documented set for gruLayer weights is reconstructed; confirm.
    value = validatestring(value, {'narrow-normal', ...
        'glorot', 'he', 'orthogonal', 'zeros', 'ones'}, '', name);
end
end
function value = iAssertValidBiasInitializer(value)
% Accept a named bias initializer or a custom function handle.
validateattributes(value, {'function_handle', 'char', 'string'}, {});
if (ischar(value) || isstring(value))
    % NOTE(review): list continuation lost in corrupted source — the
    % documented set for gruLayer bias is reconstructed; confirm.
    value = validatestring(value, {'zeros', ...
        'narrow-normal', 'ones'});
end
end
function initializer = iInitializerFactory(varargin)
% Delegate initializer construction to the internal factory.
initializer = nnet.internal.cnn.layer.learnable.initializer ...
    .initializerFactory(varargin{:});
end
function tf = iIsCustomInitializer(init)
% True when the initializer wraps a user-supplied function handle.
tf = isa(init, 'nnet.internal.cnn.layer.learnable.initializer.Custom');
end
function iAssertValidLayerName(name)
% Validate a layer name via the shared parameter-validation helper.
% NOTE(review): the surviving line ends with an extra ')', suggesting the
% call was wrapped (e.g. in an error-translating helper) — the wrapper was
% lost in the corrupted source; reconstructed as a direct call.
nnet.internal.cnn.layer.paramvalidation.validateLayerName(name);
end
function value = iGatherAndValidateParameter(varargin)
% Gather (from GPU if needed) and validate a numeric parameter value.
value = nnet.internal.cnn.layer.paramvalidation ...
    .gatherAndValidateNumericParameter(varargin{:});
end
function value = iAssertAndReturnValidResetGateMode(value)
% Validate the ResetGateMode option string.
value = validatestring(value, {'after-multiplication', 'before-multiplication', 'recurrent-bias-after-multiplication'});
end
function dlX = iMakeSizeOnlyArray(varargin)
% Create a size-only placeholder array used to configure layers without
% allocating real data.
dlX = deep.internal.PlaceholderArray(varargin{:});
end