Commit c472054
refine docs
fitzwang committed Jan 11, 2019
1 parent f01a4fb commit c472054
Showing 19 changed files with 142 additions and 598 deletions.
@@ -16,23 +16,24 @@ public AdaDeltaUpdateFunc() {
super();
}

- public AdaDeltaUpdateFunc(int matId, int factor, double epsilon, double beta, double lr, double regL1Param, double regL2Param, int epoch) {
-   super(matId, new int[]{factor}, new double[]{epsilon, beta, lr, regL1Param, regL2Param, epoch, 1});
+ public AdaDeltaUpdateFunc(int matId, int factor, double epsilon, double alpha, double beta, double lr, double regL1Param, double regL2Param, int epoch) {
+   super(matId, new int[]{factor}, new double[]{epsilon, alpha, beta, lr, regL1Param, regL2Param, epoch, 1});
}

- public AdaDeltaUpdateFunc(int matId, int factor, double epsilon, double beta, double lr, double regL1Param, double regL2Param, int epoch, int batchSize) {
-   super(matId, new int[]{factor}, new double[]{epsilon, beta, lr, regL1Param, regL2Param, epoch, batchSize});
+ public AdaDeltaUpdateFunc(int matId, int factor, double epsilon, double alpha, double beta, double lr, double regL1Param, double regL2Param, int epoch, int batchSize) {
+   super(matId, new int[]{factor}, new double[]{epsilon, alpha, beta, lr, regL1Param, regL2Param, epoch, batchSize});
}

@Override
public void update(ServerPartition partition, int factor, double[] scalars) {
double epsilon = scalars[0];
- double beta = scalars[1];
- double lr = scalars[2];
- double l1RegParam = scalars[3];
- double l2RegParam = scalars[4];
- double epoch = (int) scalars[5];
- double batchSize = (int) scalars[6];
+ double alpha = scalars[1];
+ double beta = scalars[2];
+ double lr = scalars[3];
+ double l1RegParam = scalars[4];
+ double l2RegParam = scalars[5];
+ double epoch = (int) scalars[6];
+ double batchSize = (int) scalars[7];
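// Layout of the scalars array after this change:
// {epsilon, alpha, beta, lr, regL1Param, regL2Param, epoch, batchSize};
// alpha is inserted at index 1, so every later entry shifts up by one position.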

for (int f = 0; f < factor; f++) {
ServerRow gradientServerRow = partition.getRow(f + 3 * factor);
@@ -47,7 +48,7 @@ public void update(ServerPartition partition, int factor, double[] scalars) {
gradient.idiv(batchSize);
}

- OptFuncs.iexpsmoothing2(square1, gradient, beta);
+ OptFuncs.iexpsmoothing2(square1, gradient, alpha);
Vector hessian = OptFuncs.adadeltahessian(square1, square2);

if (l2RegParam != 0) {
@@ ... @@

OptFuncs.iadadeltadelta(gradient, hessian, l2RegParam);
weight.isub(gradient);
+ OptFuncs.iexpsmoothing2(square2, gradient, beta);

if (l1RegParam != 0) {
OptFuncs.iadadeltathredshold(weight, hessian, l1RegParam, l2RegParam);
}

- OptFuncs.iexpsmoothing2(square2, gradient, beta);

gradient.clear();
} finally {
gradientServerRow.endWrite();
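
Read against textbook AdaDelta, the change above gives the two accumulators separate decay rates: square1 holds the running average of squared gradients and is now smoothed with alpha, while square2 holds the running average of squared updates and keeps beta (with adadeltahessian(square1, square2) presumably supplying the ratio between them). A rough sketch of the corresponding recurrences, ignoring the L1/L2 regularization and batch-size scaling that the PS function applies and without pinning down exactly where OptFuncs adds epsilon:

  E[g^2]_t = \alpha \, E[g^2]_{t-1} + (1 - \alpha) \, g_t^2
  \Delta x_t = - \sqrt{ (E[\Delta x^2]_{t-1} + \epsilon) / (E[g^2]_t + \epsilon) } \; g_t
  E[\Delta x^2]_t = \beta \, E[\Delta x^2]_{t-1} + (1 - \beta) \, (\Delta x_t)^2
  x_t = x_{t-1} + \Delta x_t
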
@@ -105,6 +105,14 @@ object MLConf {
// Momentum
val ML_OPT_MOMENTUM_MOMENTUM = "ml.opt.momentum.momentum"
val DEFAULT_ML_OPT_MOMENTUM_MOMENTUM = 0.9
+ // AdaDelta
+ val ML_OPT_ADADELTA_ALPHA = "ml.opt.adadelta.alpha"
+ val DEFAULT_ML_OPT_ADADELTA_ALPHA = 0.9
+ val ML_OPT_ADADELTA_BETA = "ml.opt.adadelta.beta"
+ val DEFAULT_ML_OPT_ADADELTA_BETA = 0.9
+ // AdaGrad
+ val ML_OPT_ADAGRAD_BETA = "ml.opt.adadelta.beta"
+ val DEFAULT_ML_OPT_ADADGRAD_BETA = 0.9
// Adam
val ML_OPT_ADAM_GAMMA = "ml.opt.adam.gamma"
val DEFAULT_ML_OPT_ADAM_GAMMA = 0.99
@@ -115,12 +123,6 @@ object MLConf {
val DEFAULT_ML_OPT_FTRL_ALPHA = 0.1
val ML_OPT_FTRL_BETA = "ml.opt.ftrl.beta"
val DEFAULT_ML_OPT_FTRL_BETA = 1.0
- // AdaDelta
- val ML_OPT_ADADELTA_BETA = "ml.opt.adadelta.beta"
- val DEFAULT_ML_OPT_ADADELTA_BETA = 0.9
- // AdaGrad
- val ML_OPT_ADAGRAD_BETA = "ml.opt.adadelta.beta"
- val DEFAULT_ML_OPT_ADADGRAD_BETA = 0.9

// Decays
val ML_OPT_DECAY_CLASS_NAME = "ml.opt.decay.class.name"
@@ -147,12 +149,6 @@ object MLConf {
val DEFAULT_ML_RANK_NUM = 8

// (MLP) Layer params
- val ML_MLP_INPUT_LAYER_PARAMS = "ml.mlp.input.layer.params"
- val DEFAULT_ML_MLP_INPUT_LAYER_PARAMS = "100,identity"
- val ML_MLP_HIDEN_LAYER_PARAMS = "ml.mlp.hidden.layer.params"
- val DEFAULT_ML_MLP_HIDEN_LAYER_PARAMS = "100,relu|100,relu|1,identity"
- val ML_MLP_LOSS_LAYER_PARAMS = "ml.mlp.loss.layer.params"
- val DEFAULT_ML_MLP_LOSS_LAYER_PARAMS = "logloss"
val ML_NUM_CLASS = "ml.num.class"
val DEFAULT_ML_NUM_CLASS = 2

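The two decays are now ordinary ml.opt.* settings with 0.9 defaults. A minimal sketch of the corresponding key/value pairs, written here as a plain Scala map rather than any particular configuration API:

  // Illustrative values only; both keys fall back to 0.9 when absent.
  val adaDeltaSettings: Map[String, String] = Map(
    "ml.opt.adadelta.alpha" -> "0.9", // decay for the squared-gradient accumulator
    "ml.opt.adadelta.beta"  -> "0.9"  // decay for the squared-update accumulator
  )
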
@@ -7,22 +7,22 @@ import com.tencent.angel.ml.psf.optimizer.AdaDeltaUpdateFunc
import com.tencent.angel.psagent.PSAgentContext
import org.apache.commons.logging.LogFactory

- class AdaDelta(stepSize: Double, val beta: Double = 0.9) extends Optimizer(stepSize) {
+ class AdaDelta(stepSize: Double, val alpha: Double, val beta: Double = 0.9) extends Optimizer(stepSize) {
private val LOG = LogFactory.getLog(classOf[AdaDelta])
override protected var numSlot: Int = 3

override def update(matrixId: Int, numFactors: Int, epoch: Int): Future[VoidResult] = {

- val func = new AdaDeltaUpdateFunc(matrixId, numFactors, epsilon, beta, lr, regL1Param, regL2Param, epoch)
+ val func = new AdaDeltaUpdateFunc(matrixId, numFactors, epsilon, alpha, beta, lr, regL1Param, regL2Param, epoch)
PSAgentContext.get().getUserRequestAdapter.update(func)
}

override def update(matrixId: Int, numFactors: Int, epoch: Int, batchSize: Int): Future[VoidResult] = {
- val func = new AdaDeltaUpdateFunc(matrixId, numFactors, epsilon, beta, lr, regL1Param, regL2Param, epoch, batchSize)
+ val func = new AdaDeltaUpdateFunc(matrixId, numFactors, epsilon, alpha, beta, lr, regL1Param, regL2Param, epoch, batchSize)
PSAgentContext.get().getUserRequestAdapter.update(func)
}

override def toString: String = {
- s"AdaDelta beta=$beta lr=$lr regL2=$regL2Param regL1=$regL1Param epsilon=$epsilon"
+ s"AdaDelta alpha=$alpha beta=$beta lr=$lr regL2=$regL2Param regL1=$regL1Param epsilon=$epsilon"
}
}
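
Since alpha has no default value, existing call sites of this optimizer must now pass it explicitly; only beta keeps its 0.9 default. A hypothetical construction with a made-up step size:

  // Hypothetical call site after this change; 0.01 is an arbitrary step size.
  val adaDelta = new AdaDelta(stepSize = 0.01, alpha = 0.9, beta = 0.9)
  // toString now reports both decays, e.g. "AdaDelta alpha=0.9 beta=0.9 lr=... regL2=... regL1=... epsilon=..."
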
@@ -61,7 +61,7 @@ object OptParams {
case jast: JString if nameMatch(jast, adagrad) =>
AdaGradParams(jast.extract[String].trim, Some(1.0), Some(0.9), None, None)
case jast: JString if nameMatch(jast, adadelta) =>
- AdaDeltaParams(jast.extract[String].trim, Some(1.0), Some(0.9), None, None)
+ AdaDeltaParams(jast.extract[String].trim, Some(1.0), Some(0.9), Some(0.9), None, None)
case jast: JString if nameMatch(jast, sgd) =>
new OptParams(jast.extract[String].trim, Some(1.0), None, None)
case jast: JString => throw new AngelException(s"No such a optimizer: ${jast.extract[String]}!")
@@ -123,12 +123,17 @@ object OptParams {

AdaGradParams(optType, learningRate, beta, reg1, reg2)
case `adadelta` =>
+ val alpha: Option[Double] = json \ ParamKeys.alpha match {
+   case JNothing => Some(0.9)
+   case v: JValue => Some(v.extract[Double])
+ }

val beta: Option[Double] = json \ ParamKeys.beta match {
case JNothing => Some(0.9)
case v: JValue => Some(v.extract[Double])
}

- AdaDeltaParams(optType, learningRate, beta, reg1, reg2)
+ AdaDeltaParams(optType, learningRate, alpha, beta, reg1, reg2)
case `sgd` =>
new OptParams(optType, learningRate, reg1, reg2)
case _ => throw new AngelException(s"No such a optimizer: $optType!")
@@ -193,6 +198,7 @@ case class AdaGradParams(override val name: String,

case class AdaDeltaParams(override val name: String,
override val lr: Option[Double],
+ alpha: Option[Double],
beta: Option[Double],
override val reg1: Option[Double],
override val reg2: Option[Double]) extends OptParams(name, lr, reg1, reg2) {
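
The alpha field slots in between the learning rate and beta, so code that builds AdaDeltaParams positionally has to be updated as well. A hypothetical construction mirroring the defaults used by the JSON parsing above:

  // Hypothetical direct construction; the argument order is (name, lr, alpha, beta, reg1, reg2).
  val adaDeltaParams = AdaDeltaParams(
    name  = "adadelta",
    lr    = Some(1.0),
    alpha = Some(0.9),
    beta  = Some(0.9),
    reg1  = None,
    reg2  = None
  )
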
docs/algo/admm_lr_on_angel.md: 149 changes (0 additions, 149 deletions). This file was deleted.

