Commit: code cleanup

Yibing Liu committed May 23, 2017
1 parent 2e4c0bd, commit 4d4593b

Showing 2 changed files with 2 additions and 9 deletions.
paddle/parameter/FirstOrderOptimizer.cpp (3 changes: 2 additions & 1 deletion)
@@ -305,12 +305,13 @@ void AdamaxParameterOptimizer::update(const VectorPtr vecs[],
 void OptimizerWithGradientClipping::update(const VectorPtr vecs[],
                                            const ParameterConfig& config,
                                            size_t sparseId) const {
-  // globalGradientClipping(vecs, config, FLAGS_log_clipping);
   real global_thres_ = optConfig_.gradient_clipping_threshold();
   real local_thres_ = config.gradient_clipping_threshold();

   real threshold;
   std::string field;
+  // Get the minimum of local and global threshold
+  // as the real threshold for clipping
   if (global_thres_ > 0.0f && local_thres_ > 0.0f) {
     threshold = global_thres_ < local_thres_ ? global_thres_ : local_thres_;
     field = global_thres_ < local_thres_ ? "global" : "local";
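For context, the hunk above only selects the effective threshold; the clipping itself happens later in the function, outside this diff. Below is a minimal standalone sketch of the same min-of-thresholds selection followed by L2-norm clipping, using a plain std::vector<float> in place of Paddle's VectorPtr. The names pickThreshold and clipByNorm are hypothetical, and the fallback when only one threshold is positive is an assumption, not taken from this diff.

// Standalone sketch; not Paddle's actual implementation.
#include <cmath>
#include <iostream>
#include <string>
#include <vector>

// Pick the effective clipping threshold: when both the global (optimizer-wide)
// and the local (per-parameter) thresholds are positive, use the smaller one,
// mirroring the logic in OptimizerWithGradientClipping::update above.
float pickThreshold(float globalThres, float localThres, std::string* field) {
  if (globalThres > 0.0f && localThres > 0.0f) {
    *field = globalThres < localThres ? "global" : "local";
    return globalThres < localThres ? globalThres : localThres;
  }
  // Assumed fallback (not shown in the hunk): use whichever one is set.
  *field = globalThres > 0.0f ? "global" : "local";
  return globalThres > 0.0f ? globalThres : localThres;
}

// Scale the gradient in place when its L2 norm exceeds the threshold.
void clipByNorm(std::vector<float>* grad, float threshold) {
  float sumSq = 0.0f;
  for (float g : *grad) sumSq += g * g;
  const float norm = std::sqrt(sumSq);
  if (norm > threshold) {
    const float scale = threshold / norm;
    for (float& g : *grad) g *= scale;
  }
}

int main() {
  std::vector<float> grad = {3.0f, 4.0f};  // L2 norm = 5
  std::string field;
  const float threshold = pickThreshold(10.0f, 2.5f, &field);
  clipByNorm(&grad, threshold);
  // The local threshold (2.5) wins, so the gradient is scaled by 0.5.
  std::cout << "clipped by " << field << " threshold " << threshold << ": ["
            << grad[0] << ", " << grad[1] << "]\n";  // prints [1.5, 2]
  return 0;
}

Taking the minimum keeps the stricter of the two limits in effect, so a per-parameter threshold can only tighten, never loosen, the optimizer-wide one.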
paddle/parameter/ParameterOptimizer.h (8 changes: 0 additions & 8 deletions)
@@ -170,9 +170,6 @@ class ParameterOptimizer {

   real getLearningRate() const { return learningRate_; }

-  // real getGradientClippingThreshold() const {return
-  // gradientClippingThreshold_;}
-
   virtual void setNoDecay() { applyDecay_ = false; }

   static ParameterOptimizer* create(const OptimizationConfig& optConfig,
@@ -206,11 +203,6 @@ class ParameterOptimizer {
   */
   real learningRate_;

-  /**
-   * global threshold for gradient clipping,
-   * init value is opt_config.gradient_clipping_thresholod
-   */
-
   std::unique_ptr<LearningRateScheduler> learningRateScheduler_;
   int64_t pass_;  // current training pass (starting from 0)
   bool firstTime_;
