Reshape layer #108 (Closed)
Commits (8):
- 8f24265: Added ReshapeLayer definition to vision_layers.hpp (sguada)
- 4b02cd0: Added ReshapeLayer to layer_factory.cpp (sguada)
- e62a8c2: Added num, channel, height, width parameters for the reshape layer (sguada)
- 998a2fb: Added code for reshape_layer (sguada)
- c1c6c4b: Added test for reshape_layer (sguada)
- 06cbb3d: Fixed misspelling and fixed test loops (sguada)
- 0a581f3: Renamed reshape_layer params (sguada)
- c4dc09e: Renamed reshape params to new_num, new_channels, new_height and new_w… (sguada)
reshape_layer implementation (new file, +64 lines):

```cpp
// Copyright 2014 Sergio Guadarrama

#include <vector>
#include <string>

#include "caffe/layer.hpp"
#include "caffe/vision_layers.hpp"
#include "caffe/util/math_functions.hpp"

namespace caffe {

template <typename Dtype>
void ReshapeLayer<Dtype>::SetUp(const vector<Blob<Dtype>*>& bottom,
      vector<Blob<Dtype>*>* top) {
  CHECK_EQ(bottom.size(), 1) << "Reshape Layer takes a single blob as input.";
  CHECK_EQ(top->size(), 1) << "Reshape Layer takes a single blob as output.";
  NUM_ = this->layer_param_.new_num();
  CHANNELS_ = this->layer_param_.new_channels();
  HEIGHT_ = this->layer_param_.new_height();
  WIDTH_ = this->layer_param_.new_width();
  COUNT_ = NUM_ * CHANNELS_ * HEIGHT_ * WIDTH_;
  (*top)[0]->Reshape(NUM_, CHANNELS_, HEIGHT_, WIDTH_);
  // The new shape must preserve the total number of elements.
  CHECK_EQ(COUNT_, bottom[0]->count());
  CHECK_EQ(COUNT_, (*top)[0]->count());
}

template <typename Dtype>
void ReshapeLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
    vector<Blob<Dtype>*>* top) {
  // Reshaping is a plain copy of the flat buffer; only indexing changes.
  const Dtype* bottom_data = bottom[0]->cpu_data();
  Dtype* top_data = (*top)[0]->mutable_cpu_data();
  caffe_copy(COUNT_, bottom_data, top_data);
}

template <typename Dtype>
void ReshapeLayer<Dtype>::Forward_gpu(const vector<Blob<Dtype>*>& bottom,
    vector<Blob<Dtype>*>* top) {
  const Dtype* bottom_data = bottom[0]->gpu_data();
  Dtype* top_data = (*top)[0]->mutable_gpu_data();
  caffe_gpu_copy(COUNT_, bottom_data, top_data);
}

template <typename Dtype>
Dtype ReshapeLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
    const bool propagate_down, vector<Blob<Dtype>*>* bottom) {
  // The gradient passes through unchanged.
  const Dtype* top_diff = top[0]->cpu_diff();
  Dtype* bottom_diff = (*bottom)[0]->mutable_cpu_diff();
  caffe_copy(COUNT_, top_diff, bottom_diff);
  return Dtype(0.);
}

template <typename Dtype>
Dtype ReshapeLayer<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top,
    const bool propagate_down, vector<Blob<Dtype>*>* bottom) {
  const Dtype* top_diff = top[0]->gpu_diff();
  Dtype* bottom_diff = (*bottom)[0]->mutable_gpu_diff();
  caffe_gpu_copy(COUNT_, top_diff, bottom_diff);
  return Dtype(0.);
}

INSTANTIATE_CLASS(ReshapeLayer);

}  // namespace caffe
```
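Why a flat copy suffices: Caffe blobs store data row-major in (num, channels, height, width) order, so any two shapes with the same total count address the same linear buffer, which is exactly what the `CHECK_EQ` on counts in `SetUp` guarantees. Below is a standalone sketch of that offset arithmetic for the 2x3x6x5 to 1x6x6x5 case checked by TestCPU in the tests that follow; the `Offset` helper is illustrative, mirroring (not calling) Caffe's `Blob::offset`:

```cpp
#include <cassert>

// Row-major offset of (n, c, h, w) in a blob of shape (N, C, H, W).
// N never enters the arithmetic, so only C, H, W are passed in.
inline int Offset(int C, int H, int W, int n, int c, int h, int w) {
  return ((n * C + c) * H + h) * W + w;
}

int main() {
  // A 2x3x6x5 bottom reshaped to 1x6x6x5: element (0, c, h, w) of the
  // top lands on the same flat index as (c / 3, c % 3, h, w) of the
  // bottom, which is the identity TestCPU asserts element by element.
  for (int c = 0; c < 2 * 3; ++c) {
    for (int h = 0; h < 6; ++h) {
      for (int w = 0; w < 5; ++w) {
        assert(Offset(6, 6, 5, 0, c, h, w) ==
               Offset(3, 6, 5, c / 3, c % 3, h, w));
      }
    }
  }
  return 0;
}
```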
reshape_layer tests (new file, +174 lines):

```cpp
// Copyright 2014 Sergio Guadarrama

#include <cstring>
// #include <cuda_runtime.h>

#include "gtest/gtest.h"
#include "caffe/blob.hpp"
#include "caffe/common.hpp"
#include "caffe/filler.hpp"
#include "caffe/vision_layers.hpp"
#include "caffe/test/test_gradient_check_util.hpp"

#include "caffe/test/test_caffe_main.hpp"

namespace caffe {

extern cudaDeviceProp CAFFE_TEST_CUDA_PROP;

template <typename Dtype>
class ReshapeLayerTest : public ::testing::Test {
 protected:
  ReshapeLayerTest()
      : blob_bottom_(new Blob<Dtype>(2, 3, 6, 5)),
        blob_top_(new Blob<Dtype>()) {
    // Fill the bottom blob with Gaussian noise.
    FillerParameter filler_param;
    GaussianFiller<Dtype> filler(filler_param);
    filler.Fill(this->blob_bottom_);
    blob_bottom_vec_.push_back(blob_bottom_);
    blob_top_vec_.push_back(blob_top_);
  }
  virtual ~ReshapeLayerTest() { delete blob_bottom_; delete blob_top_; }
  Blob<Dtype>* const blob_bottom_;
  Blob<Dtype>* const blob_top_;
  vector<Blob<Dtype>*> blob_bottom_vec_;
  vector<Blob<Dtype>*> blob_top_vec_;
};

typedef ::testing::Types<float, double> Dtypes;
TYPED_TEST_CASE(ReshapeLayerTest, Dtypes);

TYPED_TEST(ReshapeLayerTest, TestSetup) {
  LayerParameter layer_param;
  layer_param.set_new_num(1);
  layer_param.set_new_channels(2 * 3);
  layer_param.set_new_height(6);
  layer_param.set_new_width(5);
  ReshapeLayer<TypeParam> layer(layer_param);
  layer.SetUp(this->blob_bottom_vec_, &(this->blob_top_vec_));
  EXPECT_EQ(this->blob_top_->num(), 1);
  EXPECT_EQ(this->blob_top_->channels(), 2 * 3);
  EXPECT_EQ(this->blob_top_->height(), 6);
  EXPECT_EQ(this->blob_top_->width(), 5);
}

TYPED_TEST(ReshapeLayerTest, TestSetup2) {
  // Reshape like flatten.
  LayerParameter layer_param;
  layer_param.set_new_num(2);
  layer_param.set_new_channels(3 * 6 * 5);
  layer_param.set_new_height(1);
  layer_param.set_new_width(1);
  ReshapeLayer<TypeParam> layer(layer_param);
  layer.SetUp(this->blob_bottom_vec_, &(this->blob_top_vec_));
  EXPECT_EQ(this->blob_top_->num(), 2);
  EXPECT_EQ(this->blob_top_->channels(), 3 * 6 * 5);
  EXPECT_EQ(this->blob_top_->height(), 1);
  EXPECT_EQ(this->blob_top_->width(), 1);
}

TYPED_TEST(ReshapeLayerTest, TestCPU) {
  LayerParameter layer_param;
  layer_param.set_new_num(1);
  layer_param.set_new_channels(2 * 3);
  layer_param.set_new_height(6);
  layer_param.set_new_width(5);
  ReshapeLayer<TypeParam> layer(layer_param);
  Caffe::set_mode(Caffe::CPU);
  layer.SetUp(this->blob_bottom_vec_, &(this->blob_top_vec_));
  layer.Forward(this->blob_bottom_vec_, &(this->blob_top_vec_));
  for (int c = 0; c < 2 * 3; ++c) {
    for (int h = 0; h < 6; ++h) {
      for (int w = 0; w < 5; ++w) {
        EXPECT_EQ(this->blob_top_->data_at(0, c, h, w),
            this->blob_bottom_->data_at(c / 3, c % 3, h, w));
      }
    }
  }
}

TYPED_TEST(ReshapeLayerTest, TestCPU2) {
  LayerParameter layer_param;
  layer_param.set_new_num(2);
  layer_param.set_new_channels(3 * 6 * 5);
  layer_param.set_new_height(1);
  layer_param.set_new_width(1);
  ReshapeLayer<TypeParam> layer(layer_param);
  Caffe::set_mode(Caffe::CPU);
  layer.SetUp(this->blob_bottom_vec_, &(this->blob_top_vec_));
  layer.Forward(this->blob_bottom_vec_, &(this->blob_top_vec_));
  for (int c = 0; c < 3 * 6 * 5; ++c) {
    EXPECT_EQ(this->blob_top_->data_at(0, c, 0, 0),
        this->blob_bottom_->data_at(0, c / (6 * 5), (c / 5) % 6, c % 5));
    EXPECT_EQ(this->blob_top_->data_at(1, c, 0, 0),
        this->blob_bottom_->data_at(1, c / (6 * 5), (c / 5) % 6, c % 5));
  }
}

TYPED_TEST(ReshapeLayerTest, TestGPU) {
  LayerParameter layer_param;
  layer_param.set_new_num(1);
  layer_param.set_new_channels(2 * 3);
  layer_param.set_new_height(6);
  layer_param.set_new_width(5);
  ReshapeLayer<TypeParam> layer(layer_param);
  Caffe::set_mode(Caffe::GPU);
  layer.SetUp(this->blob_bottom_vec_, &(this->blob_top_vec_));
  layer.Forward(this->blob_bottom_vec_, &(this->blob_top_vec_));
  for (int c = 0; c < 2 * 3; ++c) {
    for (int h = 0; h < 6; ++h) {
      for (int w = 0; w < 5; ++w) {
        EXPECT_EQ(this->blob_top_->data_at(0, c, h, w),
            this->blob_bottom_->data_at(c / 3, c % 3, h, w));
      }
    }
  }
}

TYPED_TEST(ReshapeLayerTest, TestGPU2) {
  LayerParameter layer_param;
  layer_param.set_new_num(2);
  layer_param.set_new_channels(3 * 6 * 5);
  layer_param.set_new_height(1);
  layer_param.set_new_width(1);
  ReshapeLayer<TypeParam> layer(layer_param);
  Caffe::set_mode(Caffe::GPU);
  layer.SetUp(this->blob_bottom_vec_, &(this->blob_top_vec_));
  layer.Forward(this->blob_bottom_vec_, &(this->blob_top_vec_));
  for (int c = 0; c < 3 * 6 * 5; ++c) {
    EXPECT_EQ(this->blob_top_->data_at(0, c, 0, 0),
        this->blob_bottom_->data_at(0, c / (6 * 5), (c / 5) % 6, c % 5));
    EXPECT_EQ(this->blob_top_->data_at(1, c, 0, 0),
        this->blob_bottom_->data_at(1, c / (6 * 5), (c / 5) % 6, c % 5));
  }
}

TYPED_TEST(ReshapeLayerTest, TestCPUGradient) {
  LayerParameter layer_param;
  layer_param.set_new_num(1);
  layer_param.set_new_channels(2 * 3);
  layer_param.set_new_height(6);
  layer_param.set_new_width(5);
  Caffe::set_mode(Caffe::CPU);
  ReshapeLayer<TypeParam> layer(layer_param);
  GradientChecker<TypeParam> checker(1e-2, 1e-2);
  checker.CheckGradientExhaustive(layer, this->blob_bottom_vec_,
      this->blob_top_vec_);
}

TYPED_TEST(ReshapeLayerTest, TestGPUGradient) {
  LayerParameter layer_param;
  layer_param.set_new_num(1);
  layer_param.set_new_channels(2 * 3);
  layer_param.set_new_height(6);
  layer_param.set_new_width(5);
  Caffe::set_mode(Caffe::GPU);
  ReshapeLayer<TypeParam> layer(layer_param);
  GradientChecker<TypeParam> checker(1e-2, 1e-2);
  checker.CheckGradientExhaustive(layer, this->blob_bottom_vec_,
      this->blob_top_vec_);
}

}  // namespace caffe
```
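For completeness, a minimal driver sketch of the layer's lifecycle outside the gtest harness, assuming only the API visible in this diff (the constructor, `SetUp`, `Forward`, `Caffe::set_mode`, and the `new_*` setters); an illustration, not part of the pull request:

```cpp
#include <vector>

#include "caffe/blob.hpp"
#include "caffe/common.hpp"
#include "caffe/vision_layers.hpp"

using namespace caffe;  // brevity for the sketch

int main() {
  // Flatten a 2x3x6x5 blob to 2x90x1x1, as in TestSetup2/TestCPU2 above.
  Blob<float> bottom_blob(2, 3, 6, 5);
  Blob<float> top_blob;
  std::vector<Blob<float>*> bottom(1, &bottom_blob);
  std::vector<Blob<float>*> top(1, &top_blob);

  LayerParameter param;
  param.set_new_num(2);
  param.set_new_channels(3 * 6 * 5);
  param.set_new_height(1);
  param.set_new_width(1);

  Caffe::set_mode(Caffe::CPU);
  ReshapeLayer<float> layer(param);
  layer.SetUp(bottom, &top);    // checks counts match and shapes the top
  layer.Forward(bottom, &top);  // flat copy of the data
  // Same storage size before and after; only the indexing changed.
  return top_blob.count() == bottom_blob.count() ? 0 : 1;
}
```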
Review discussion:

Comment: How about `reshape_` instead of `new_`, just to make it really obvious?

Reply: I did that at first, but then I thought that other layers may want to use the same parameters to specify the new dimensions of the output. See for instance #120.