Using Caffe for regression and multi-label training

Source: Internet
Author: User
Tags shuffle

Regression:

Reprint website:

Http://www.cnblogs.com/frombeijingwithlove/p/5314042.html


Having reviewed many approaches, this one works well and is recommended.

The author's method can be implemented as-is; it is a basic building block that others can extend on their own.


Multiple Tags:

Reprint website:

1. Modify the source code: http://blog.csdn.net/hubin232/article/details/50960201 — the method in this post is rather hard-coded and not very flexible.

2, input data and labels, respectively, lmdb format data: http://blog.csdn.net/hyman_yx/article/details/51791136

The way labels and image data are stored can be adapted as needed; the code is as follows:

int main (int argc, char** argv) {#ifdef USE_OPENCV:: google::initgooglelogging (argv[0));

Print output to stderr (while still logging) Flags_alsologtostderr = 1;
#ifndef gflags_gflags_h_ namespace gflags = Google; #endif gflags::setusagemessage ("Convert a set of images to the leveldb/lmdb\n" "format used as input for caffe.\n" "  usage:\n "" Convert_imageset [FLAGS] rootfolder/listfile db_name\n "" The Imagenet DataSet for the training demo is
	at\n "" http://www.image-net.org/download-images\n ");

	GFlags::P arsecommandlineflags (&ARGC, &ARGV, true);
		if (ARGC < 6) {gflags::showusagewithflagsrestrict (argv[0], "Tools/convert_imageset");
	return 1; const BOOL Is_color =!
	Flags_gray;
	const BOOL Check_size = Flags_check_size;
	const BOOL encoded = flags_encoded;

	Const string encode_type = Flags_encode_type;
	Std::ifstream infile (argv[2]);
	STD::VECTOR&LT;STD::p air<std::string, std::vector<float>> > lines;

	std::string filename;std::string label_count_string = argv[5];
	
	int label_count = Std::atoi (Label_count_string.c_str ());
	
	Std::vector<float> label (Label_count);
			
		while (infile >> filename) {for (int i = 0; i < label_count;i++) {infile >> label[i];
	} lines.push_back (Std::make_pair (filename, label));
		} if (flags_shuffle) {//Randomly shuffle data LOG (INFO) << "shuffling data";
	Shuffle (Lines.begin (), Lines.end ());

	LOG (INFO) << "A total of" << lines.size () << "Images";

	if (Encode_type.size () &&!encoded) LOG (INFO) << "Encode_type specified, assuming encoded=true.";
	int resize_height = std::max<int> (0, flags_resize_height);

	int resize_width = std::max<int> (0, Flags_resize_width);
	Create new DB scoped_ptr<db::D b> db_image (Db::getdb (flags_backend));
	SCOPED_PTR&LT;DB::D b> Db_label (Db::getdb (flags_backend));
	Db_image->open (Argv[3], db::new);
	Db_label->open (Argv[4], db::new);Scoped_ptr<db::transaction> Txn_image (Db_image->newtransaction ());

	Scoped_ptr<db::transaction> Txn_label (Db_label->newtransaction ());
	Storing to DB std::string Root_folder (argv[1));
	Datum Datum_label;
	Datum Datum_image;
	int count = 0;
	int Data_size_label = 0;
	int data_size_image = 0;

	BOOL data_size_initialized = false;
		for (int line_id = 0; line_id < lines.size (); ++line_id) {bool status;
		std::string enc = encode_type; if (encoded &&!enc.size ()) {//Guess the encoding type from the file name string fn = Lines[line_id].first
			;
			size_t p = fn.rfind ('. ');
			if (p = = fn.npos) LOG (WARNING) << "Failed to guess" encoding of ' "<< fn <<" ";
			ENC = FN.SUBSTR (p);
		Std::transform (Enc.begin (), Enc.end (), Enc.begin (),:: ToLower); Status = Readimagetodatum (Root_folder + Lines[line_id].first, lines[line_id].second[0), Resize_height, Resize_widt
		H, Is_color, Enc, &datum_image); if (status = FALSE) continue;
		Datum_label.set_height (1);
		Datum_label.set_width (1);
		Datum_label.set_channels (Label_count);
		int count_tmp = Datum_label.float_data_size (); for (int index_label = 0; Index_label < lines[line_id].second.size (); index_label++) {Float Tmp_float_value = Lin
			Es[line_id].second[index_label];
		Datum_label.add_float_data (Tmp_float_value); } if (check_size) {if (!data_size_initialized) {Data_size_label = Datum_label.channels () * Datum_label.heigh
				T () * datum_label.width ();
				Data_size_image = Datum_image.channels () * datum_image.height () * Datum_image.width ();
			Data_size_initialized = true;
				else {Const std::string& Data_label = Datum_label.data ();

				Check_eq (Data_label.size (), Data_size_label) << "Incorrect data field size" << data_label.size ();
				Const std::string& data_image = Data_image.data (); Check_eq (Data_image.size (), data_size_image) << "Incorrect data field size" <<
			Data_image.size ();
		}//Sequential string key_str_image = Caffe::format_int (line_id, 8) + "_" + Lines[line_id].first;

		String Key_str_label = Caffe::format_int (line_id, 8) + "Label_" + lines[line_id].first;
		Put in db string Out_label;
		String Out_image; CHECK (Datum_label.
		Serializetostring (&out_label)); CHECK (datum_image.

		Serializetostring (&out_image));
		Datum_label.clear_float_data ();
		Txn_label->put (Key_str_label, Out_label);
		Txn_image->put (Key_str_image, out_image);
			if (++count% 1000 = = 0) {//Commit db Txn_image->commit ();

			Txn_image.reset (Db_image->newtransaction ());
			Txn_label->commit ();
			Txn_label.reset (Db_label->newtransaction ());
	LOG (INFO) << "processed" << count << "files.";}}
		Write the last batch if (count% 1000!= 0) {txn_label->commit ();
		Txn_image->commit ();
LOG (INFO) << "processed" << count << "files.";} #else LOG (FATAL) << "This tool requires OpenCV;
compile with USE_OPENCV. ";
#endif//USE_OPENCV return 0; }

Note that the train and val datasets must not point to the same database files: Caffe's data layers cannot open the same database from two readers, so sharing one database will make data loading fail.

The prototxt definitions are written the same way as described on the website linked above.





Contact Us

The content source of this page is from Internet, which doesn't represent Alibaba Cloud's opinion; products and services mentioned on that page don't have any relationship with Alibaba Cloud. If the content of the page makes you feel confusing, please write us an email, we will handle the problem within 5 days after receiving your email.

If you find any instances of plagiarism from the community, please send an email to: info-contact@alibabacloud.com and provide relevant evidence. A staff member will contact you within 5 working days.

A Free Trial That Lets You Build Big!

Start building with 50+ products and up to 12 months usage for Elastic Compute Service

  • Sales Support

    1 on 1 presale consultation

  • After-Sales Support

    24/7 Technical Support 6 Free Tickets per Quarter Faster Response

  • Alibaba Cloud offers highly flexible support services tailored to meet your exact needs.