This repository has been archived by the owner on May 24, 2018. It is now read-only.

add get weight task #76

Merged: 4 commits, Mar 29, 2015
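This pull request adds a get_weight task to cxxnet_main.cpp: it asks the trainer for the parameters of a named layer via GetWeight, writes them to weight_filename (text or raw float32, depending on output_format), and writes the tensor shape to an accompanying "<weight_filename>.meta" file. As a rough, hypothetical sketch only (the layer and file names below are placeholders, not taken from this PR), the new parameters could be set in the usual cxxnet name = value configuration style:

  # hypothetical config sketch for the new task; not part of this PR
  task = get_weight
  # layer whose parameters are dumped; weight_name defaults to "wmat" in the constructor
  extract_layer_name = fc1
  # output file; a "<weight_filename>.meta" file holding the shape is written alongside
  weight_filename = fc1_wmat.txt
  # "txt" writes text output, anything else writes raw float32 binary
  output_format = txt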
2 changes: 1 addition & 1 deletion Makefile
@@ -15,7 +15,7 @@ include mshadow/make/mshadow.mk
# all the possible warning flags
WARNFLAGS= -Wall
CFLAGS = -DMSHADOW_FORCE_STREAM $(WARNFLAGS)
CFLAGS += -g -O3 -I./mshadow/ -fPIC $(MSHADOW_CFLAGS)
CFLAGS += -g -O3 -I./mshadow/ -fPIC $(MSHADOW_CFLAGS)
LDFLAGS = -pthread $(MSHADOW_LDFLAGS)
NVCCFLAGS = --use_fast_math -g -O3 -ccbin $(CXX) $(MSHADOW_NVCCFLAGS)

42 changes: 40 additions & 2 deletions src/cxxnet_main.cpp
@@ -34,6 +34,9 @@ class CXXNetLearnTask {
reset_net_type = -1;
extract_node_name = "";
output_format = 1;
weight_name = "wmat";
extract_layer_name = "";
weight_filename = "";
#if MSHADOW_USE_CUDA
this->SetParam("dev", "gpu");
#else
@@ -77,6 +80,7 @@ class CXXNetLearnTask {
if (task == "train" || task == "finetune") this->TaskTrain();
if (task == "pred") this->TaskPredict();
if (task == "extract") this->TaskExtractFeature();
if (task == "get_weight") this->TaskGetWeight();
return 0;
}

@@ -97,6 +101,8 @@ class CXXNetLearnTask {
if (!strcmp(name, "dev")) device = val;
if (!strcmp(name, "test_io")) test_io = atoi(val);
if (!strcmp(name, "extract_node_name")) extract_node_name = val;
if (!strcmp(name, "extract_layer_name")) extract_layer_name = val;
if (!strcmp(name, "weight_filename")) weight_filename = val;
if (!strcmp(name, "output_format")) {
if (!strcmp(val, "txt")) output_format = 1;
else output_format = 0;
@@ -239,8 +245,7 @@ class CXXNetLearnTask {
itr_evals.push_back(cxxnet::CreateIterator(itcfg));
eval_names.push_back(evname);
}
if (flag == 3 && (task == "pred" || task == "pred_raw" ||
task == "extract")) {
if (flag == 3 && (task == "pred" || task == "extract")) {
utils::Assert(itr_pred == NULL, "can only have one data:test");
itr_pred = cxxnet::CreateIterator(itcfg);
}
@@ -281,6 +286,33 @@ class CXXNetLearnTask {
fclose(fo);
printf("finished prediction, write into %s\n", name_pred.c_str());
}
inline void TaskGetWeight(void) {
FILE *fo = utils::FopenCheck(weight_filename.c_str(), "wb");
mshadow::TensorContainer<mshadow::cpu, 2> weight;
std::vector<index_t> shape;
net_trainer->GetWeight(&weight, &shape, extract_layer_name.c_str(), weight_name.c_str());
for (index_t i = 0; i < weight.size(0); ++i) {
mshadow::Tensor<mshadow::cpu, 2> d = weight[i].FlatTo2D();
for (index_t j = 0; j < d.size(0); ++j) {
if (output_format) {
for (index_t k = 0; k < d.size(1); ++k) {
fprintf(fo, "%g ", d[j].dptr_[k]);
}
fprintf(fo, "\n");
} else {
fwrite(d[j].dptr_, sizeof(float), d.size(1), fo);
}
}
}
fclose(fo);
std::string name_meta = weight_filename + ".meta";
FILE *fm = utils::FopenCheck(name_meta.c_str(), "w");
for (index_t i = 0; i < shape.size(); ++i) {
fprintf(fm, "%u ", shape[i]);
}
fclose(fm);
printf("finished getting weight, write into %s\n", weight_filename.c_str());
}
inline void TaskExtractFeature() {
long nrow = 0;
mshadow::Shape<3> dshape;
@@ -468,6 +500,12 @@ class CXXNetLearnTask {
std::string extract_node_name;
/*! \brief output format of network */
int output_format;
/*! \brief the layer name for weight extraction */
std::string extract_layer_name;
/*! \brief the output filename of the extracted weight */
std::string weight_filename;
/*! \brief name of the weight to get: wmat or bias */
std::string weight_name;
};
} // namespace cxxnet

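For reference, when output_format is not "txt" the dump produced by TaskGetWeight is simply the weight values as consecutive raw float32, and the ".meta" file holds the shape as space-separated unsigned integers. A minimal stand-alone reader sketch (not part of this PR; file names are placeholders):

// read_weight.cpp -- sketch for loading a binary dump written by the new
// get_weight task together with its ".meta" shape file; not part of this PR,
// "fc1_wmat.bin" is a placeholder name.
#include <cstdio>
#include <vector>

int main() {
  // the .meta file holds the tensor shape as space-separated unsigned ints
  std::vector<unsigned> shape;
  FILE *fm = fopen("fc1_wmat.bin.meta", "r");
  if (fm == NULL) return 1;
  unsigned d;
  while (fscanf(fm, "%u", &d) == 1) shape.push_back(d);
  fclose(fm);
  // the binary dump is the flattened weight as consecutive float32 values
  size_t total = 1;
  for (size_t i = 0; i < shape.size(); ++i) total *= shape[i];
  std::vector<float> data(total);
  FILE *fi = fopen("fc1_wmat.bin", "rb");
  if (fi == NULL) return 1;
  size_t nread = fread(data.data(), sizeof(float), total, fi);
  fclose(fi);
  printf("read %zu of %zu values across %zu dimensions\n",
         nread, total, shape.size());
  return 0;
}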
1 change: 0 additions & 1 deletion src/io/image_augmenter-inl.hpp
@@ -92,7 +92,6 @@ class ImageAugmenter {
// new width and height
float new_width = std::max(min_img_size_, std::min(max_img_size_, scale * src.cols));
float new_height = std::max(min_img_size_, std::min(max_img_size_, scale * src.rows));
//printf("%f %f %f %f %f %f %f %f %f\n", s, a, b, scale, ratio, hs, ws, new_width, new_height);
cv::Mat M(2, 3, CV_32F);
M.at<float>(0, 0) = hs * a - s * b * ws;
M.at<float>(1, 0) = -b * ws;
3 changes: 2 additions & 1 deletion src/nnet/neural_net-inl.hpp
@@ -563,10 +563,11 @@ class NeuralNetThread {
oparam_weight->Resize(mshadow::Shape2(0, 0));
} else {
oparam_weight->Resize(vs.data[0].shape_);
mshadow::Copy(*oparam_weight, vs.data[0]);
mshadow::Copy(*oparam_weight, vs.data[0], stream);
*oparam_shape = vs.shapes[0];
utils::Assert(vs.fields[0] == iparam_tag,
"GetWeight:shape mismatch");
stream->Wait();
}
return;
}
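The neural_net-inl.hpp hunk routes GetWeight's device-to-host copy through the worker's stream and then blocks on it; mshadow::Copy with a stream argument is asynchronous, so without the Wait() the host-side buffer could be read before the copy has finished. A minimal sketch of that copy-then-wait pattern follows; it assumes a CUDA-enabled mshadow build, and the tensor names and sizes are placeholders rather than anything from this PR.

// async_copy_demo.cu -- sketch of the asynchronous copy + Wait() pattern used
// above; not part of this PR. Assumes a CUDA-enabled mshadow build and must be
// compiled with nvcc.
#include <cstdio>
#include "mshadow/tensor.h"

int main() {
  mshadow::InitTensorEngine<mshadow::gpu>(0);
  mshadow::Stream<mshadow::gpu> *stream = mshadow::NewStream<mshadow::gpu>();
  // device-side "weight" and a host-side destination of the same shape
  mshadow::TensorContainer<mshadow::gpu, 2> gweight(mshadow::Shape2(4, 8));
  mshadow::TensorContainer<mshadow::cpu, 2> cweight(mshadow::Shape2(4, 8));
  gweight.set_stream(stream);
  gweight = 1.0f;                           // fill on the device, runs on the stream
  mshadow::Copy(cweight, gweight, stream);  // asynchronous device-to-host copy
  stream->Wait();                           // block until the copy has completed
  printf("first value on host: %f\n", cweight[0][0]);
  mshadow::DeleteStream(stream);
  mshadow::ShutdownTensorEngine<mshadow::gpu>();
  return 0;
}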