Skip to content

Commit

Permalink
Merge branch 'develop' of https://github.com/PaddlePaddle/Paddle into…
Browse files Browse the repository at this point in the history
… batch_norm
  • Loading branch information
qingqing01 committed Dec 21, 2016
2 parents e4c492d + 4e34220 commit 567871f
Show file tree
Hide file tree
Showing 9 changed files with 26 additions and 43 deletions.
12 changes: 6 additions & 6 deletions paddle/gserver/tests/test_ConvTrans.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -206,8 +206,8 @@ TEST(Layer, convTransLayerFwd2) {
/* filter_size */ 5,
result);

float resultData[] = {1, 2, 2, 2, 1, 2, 4, 4, 4, 2, 2, 4, 4,
4, 2, 2, 4, 4, 4, 2, 1, 2, 2, 2, 1};
real resultData[] = {1, 2, 2, 2, 1, 2, 4, 4, 4, 2, 2, 4, 4,
4, 2, 2, 4, 4, 4, 2, 1, 2, 2, 2, 1};
result->setData(resultData);
doOneConvtTest(/* imgSize */ 5,
/* output_x */ 2,
Expand All @@ -216,8 +216,8 @@ TEST(Layer, convTransLayerFwd2) {
/* filter_size */ 4,
result);

float resultData2[] = {1, 2, 2, 2, 1, 2, 4, 4, 4, 2, 2, 4, 4,
4, 2, 2, 4, 4, 4, 2, 1, 2, 2, 2, 1};
real resultData2[] = {1, 2, 2, 2, 1, 2, 4, 4, 4, 2, 2, 4, 4,
4, 2, 2, 4, 4, 4, 2, 1, 2, 2, 2, 1};
result->setData(resultData2);
doOneConvtTest(/* imgSize */ 5,
/* output_x */ 2,
Expand All @@ -226,8 +226,8 @@ TEST(Layer, convTransLayerFwd2) {
/* filter_size */ 5,
result);

float resultData3[] = {1, 1, 2, 1, 1, 1, 1, 2, 1, 1, 2, 2, 4,
2, 2, 1, 1, 2, 1, 1, 1, 1, 2, 1, 1};
real resultData3[] = {1, 1, 2, 1, 1, 1, 1, 2, 1, 1, 2, 2, 4,
2, 2, 1, 1, 2, 1, 1, 1, 1, 2, 1, 1};
result->setData(resultData3);
doOneConvtTest(/* imgSize */ 5,
/* output_x */ 2,
Expand Down
29 changes: 6 additions & 23 deletions paddle/gserver/tests/test_ConvUnify.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -106,8 +106,8 @@ TEST(Layer, convParaUnified) {
#ifndef PADDLE_ONLY_CPU
MatrixPtr input, resultCpu, resultGpu;
input = Matrix::create(1, 4 * 4, false, false);
float inputData[] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16};
float param[] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 9, 8, 7, 6, 5, 4, 3, 2, 1};
real inputData[] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16};
real param[] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 9, 8, 7, 6, 5, 4, 3, 2, 1};

input->setData(inputData);

Expand Down Expand Up @@ -137,26 +137,9 @@ TEST(Layer, convParaUnified) {
checkMatrixEqual(resultCpu, resultGpu);

input = Matrix::create(1, 3 * 3 * 2, false, false);
float inputData2[] = {1,
2,
3,
4,
5,
6,
7,
8,
9,

10,
11,
12,
13,
14,
15,
16,
17,
18};
float param2[] = {1, 2, 3, 4, 5, 6, 7, 8, 8, 7, 6, 5, 4, 3, 2, 1};
real inputData2[] = {
1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18};
real param2[] = {1, 2, 3, 4, 5, 6, 7, 8, 8, 7, 6, 5, 4, 3, 2, 1};

input->setData(inputData2);

Expand Down Expand Up @@ -185,7 +168,7 @@ TEST(Layer, convParaUnified) {
true);
checkMatrixEqual(resultCpu, resultGpu);

float param3[] = {1, 2, 3, 4, 4, 3, 2, 1};
real param3[] = {1, 2, 3, 4, 4, 3, 2, 1};

resultCpu = doOneConvTest(/* imgSize */ 3,
/* output_x */ 2,
Expand Down
6 changes: 3 additions & 3 deletions paddle/parameter/ParameterUpdaterBase.h
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ class ParameterUpdater {
virtual void startPass() {}

// called by Trainer when finishing a pass, return true if pass accepted
virtual bool finishPass(real cost = 0) { return true; }
virtual bool finishPass() { return true; }

// called by Trainer before backward() of a batch
// Return the type of pass it needs. This pass type will be passed
Expand Down Expand Up @@ -112,9 +112,9 @@ class ParameterUpdaterComposite : public ParameterUpdater {
[&](int tid, size_t numThreads) { updaters_[tid]->startPass(); });
}

virtual bool finishPass(real cost = 0) {
virtual bool finishPass() {
syncThreadPool_->execPlusOwner(
[&](int tid, size_t numThreads) { updaters_[tid]->finishPass(cost); });
[&](int tid, size_t numThreads) { updaters_[tid]->finishPass(); });
return true;
}

Expand Down
8 changes: 4 additions & 4 deletions paddle/trainer/ParameterUpdater.h
Original file line number Diff line number Diff line change
Expand Up @@ -102,9 +102,9 @@ class SgdLocalUpdater : public ParameterUpdater {
* @return true if accept (used for owlqn).
*/
virtual bool finishPass(real cost) {
virtual bool finishPass() {
optimizer_->finishPass();
return ParameterUpdater::finishPass(cost);
return ParameterUpdater::finishPass();
}

/**
Expand Down Expand Up @@ -220,9 +220,9 @@ class SgdUpdaterWithCpuAverager : public SgdLocalUpdater {
averager_->startPass();
SgdLocalUpdater::startPass();
}
virtual bool finishPass(real cost) {
virtual bool finishPass() {
averager_->finishPass();
return SgdLocalUpdater::finishPass(cost);
return SgdLocalUpdater::finishPass();
}

/// apply the averaged parameter to PARAMETER_VALUE
Expand Down
4 changes: 2 additions & 2 deletions paddle/trainer/RemoteParameterUpdater.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -309,7 +309,7 @@ void RemoteParameterUpdater::startPass() {
}
}

bool RemoteParameterUpdater::finishPass(real cost) {
bool RemoteParameterUpdater::finishPass() {
if (localUpdater_) {
localUpdater_->finishPass();
}
Expand Down Expand Up @@ -712,7 +712,7 @@ void SparseRemoteParameterUpdater::startPass() {
}
}

bool SparseRemoteParameterUpdater::finishPass(real cost) {
bool SparseRemoteParameterUpdater::finishPass() {
if (config_.algorithm() == TrainAlgorithm::SGD) {
parameterClient_->waitPassFinish();
} else {
Expand Down
4 changes: 2 additions & 2 deletions paddle/trainer/RemoteParameterUpdater.h
Original file line number Diff line number Diff line change
Expand Up @@ -90,7 +90,7 @@ class RemoteParameterUpdater : public ParameterUpdater {
*/
virtual void finishBatch(real cost);
virtual void startPass();
virtual bool finishPass(real cost);
virtual bool finishPass();

#ifndef PADDLE_DISABLE_TIMER
virtual void setForwardbackwardTime(uint64_t delta) {
Expand Down Expand Up @@ -281,7 +281,7 @@ class SparseRemoteParameterUpdater : public ParameterUpdater {
/// send all sparse related parameters to all pservers
virtual void finishBatch(real cost);
virtual void startPass();
virtual bool finishPass(real cost);
virtual bool finishPass();

virtual void apply();
virtual void restore();
Expand Down
2 changes: 1 addition & 1 deletion paddle/trainer/ThreadParameterUpdater.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,7 @@ void SgdThreadUpdater::startPass() {
}
}

bool SgdThreadUpdater::finishPass(real cost) {
bool SgdThreadUpdater::finishPass() {
catchUpWith();

for (auto& para : parameters_) {
Expand Down
2 changes: 1 addition & 1 deletion paddle/trainer/ThreadParameterUpdater.h
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@ class SgdThreadUpdater : public ParameterUpdater {
virtual void startPass();

// Use the finishPass() function of the base optimizer.
virtual bool finishPass(real cost);
virtual bool finishPass();

virtual void init(const std::vector<ParameterPtr>& parameters);
virtual PassType startBatch(int64_t batchSize);
Expand Down
2 changes: 1 addition & 1 deletion paddle/trainer/Trainer.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -537,7 +537,7 @@ void Trainer::trainOnePassBatch(int passId) {

trainerInternal_.getGradientMachine()->onPassEnd();

bool accepted = trainerInternal_.getParameterUpdater()->finishPass(cost);
bool accepted = trainerInternal_.getParameterUpdater()->finishPass();

globalStat.setThreadInfo(true);
globalStat.printAllStatus();
Expand Down

0 comments on commit 567871f

Please sign in to comment.