/** @file
 *  @brief Sample weights `entropy' in boosting neurons.
 *
 *  $Id: nnboost.cpp 2078 2005-05-18 05:04:13Z ling $
 *
 *  We study which examples the sample weights indicate as ``difficult''.
 */

// possible choices: adaboost, dpboost, and logistic
#define ADABOOST 1
#define LOGISTIC 2
#define DPBOOST  3
#define BOOST    ADABOOST

// NOTE: the header names were lost in the original listing; the standard and
// LEMGA include paths below are a reconstruction and may need adjusting.
#include <cstdlib>
#include <fstream>
#include <iostream>
#include <lemga/random.h>
#include <lemga/stump.h>
#include <lemga/pulse.h>
#include <lemga/nnlayer.h>
#include <lemga/feedforwardnn.h>

#if BOOST == ADABOOST || BOOST == LOGISTIC
#include <lemga/adaboost.h>
typedef lemga::AdaBoost Boost;
#elif BOOST == DPBOOST
#include <lemga/dpboost.h>
typedef lemga::DPBoost Boost;
#endif

using namespace lemga;

#if BOOST == ADABOOST || BOOST == DPBOOST
typedef cost::exponential MyCost;
const bool use_deriv = false;
#elif BOOST == LOGISTIC
typedef cost::logistic MyCost;
const bool use_deriv = true;
#endif

int main (int argc, char *argv[]) {
    // get the command line options
    // 1. default values
    int method = 0;
    const char *method_name[] = { "stump", "pulse function", "Perceptron" };
    UINT n_agg = 50;
    const char *outf = "margin.dat";

    // 2. read arguments
#define ARG_OFFSET 3
    if (argc < ARG_OFFSET) {
        std::cerr << "Usage: " << argv[0]
                  << " trf in method n_agg [output]\n";
        return -1;
    }
    const UINT in = atoi(argv[2]);
    std::ifstream fd(argv[1]);
    if (!fd.is_open()) {
        std::cerr << "Data file open error\n";
        return -2;
    }
    pDataSet dat = load_data(fd, (1L<<30)-1, in, 1);

    if (argc > ARG_OFFSET+0) method = atoi(argv[ARG_OFFSET+0]);
    if (argc > ARG_OFFSET+1) n_agg = atoi(argv[ARG_OFFSET+1]);
    if (argc > ARG_OFFSET+2) outf = argv[ARG_OFFSET+2];

    // 3. set up the base learner and the booster
    set_seed(0);
    LearnModel *lm;
    switch (method) {
    case 0:
        lm = new Stump(in);
        break;
    case 1:
        lm = new Pulse(in);
        ((Pulse*) lm)->set_max_transitions(3);
        break;
    case 2: {
        NNLayer l1(in, 1);
        l1.set_weight_range(-0.5, 0.5);
        FeedForwardNN nn;
        nn.add_top(l1);
        nn.set_batch_mode();
        nn.set_train_method(nn.CONJUGATE_GRADIENT);
        nn.set_parameter(1e-4, 0, 10);
        lm = nn.clone();
        break;
    }
    default:
        std::cerr << "No such learning method\n";
        return -4;
    }
    lm->set_log_file(NULL);

    MyCost cost;
    Boost ab(false, cost);
    ab.set_max_models(n_agg);
    ab.set_base_model(*lm);
    ab.use_gradient_descent(use_deriv);
    delete lm;

    std::cout << "Use " << method_name[method] << " with AdaBoost" << std::endl;
    ab.initialize();
    ab.set_train_data(dat);
    ab.train();

    // 4. output: each line gives the aggregation size, the sum of model
    //    weights so far, and y*f (the unnormalized margin) for every example
    const UINT n = ab.size();
    std::ofstream fm(outf);
    for (UINT i = 1; i <= n; ++i) {
        ab.set_aggregation_size(i);
        REAL sum = 0;
        for (UINT j = 0; j < i; ++j)
            sum += ab.model_weight(j);
        std::cout << '.' << std::flush;

        fm << i << ' ' << sum;
        for (UINT j = 0; j < dat->size(); ++j)
            fm << ' ' << dat->y(j)[0] * ab.get_output(j)[0];
        fm << '\n';
    }

    return 0;
}
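
/* Example invocation (a sketch only; the file names, input dimension, and
 * counts below are hypothetical, not from the original source). Following the
 * usage message above -- "trf in method n_agg [output]" -- this would train on
 * "sample.trf" with 2 input attributes, boost 100 single-neuron networks
 * (method 2), and write one margin line per aggregation size to "sample-margin.dat":
 *
 *     ./nnboost sample.trf 2 2 100 sample-margin.dat
 *
 * Each output line can then be divided by its second column (the sum of model
 * weights) to obtain normalized margins.
 */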