X-Boost  2.3.8
SingleTree.h
Go to the documentation of this file.
1 /* XBoost: Ada-Boost and Friends on Haar/ICF/HOG Features, Library and ToolBox
2  *
3  * Copyright (c) 2008-2014 Paolo Medici <medici@ce.unipr.it>
4  *
5  * This library is free software; you can redistribute it and/or
6  * modify it under the terms of the GNU Lesser General Public
7  * License as published by the Free Software Foundation; either
8  * version 2 of the License, or (at your option) any later version.
9  *
10  * This library is distributed in the hope that it will be useful,
11  * but WITHOUT ANY WARRANTY; without even the implied warranty of
12  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13  * Lesser General Public License for more details.
14  *
15  * You should have received a copy of the GNU Lesser General Public
16  * License along with this library; if not, write to the
17  * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
18  * Boston, MA 02111-1307, USA.
19  */
20 
21 #ifndef _SINGLE_TREE_H
22 #define _SINGLE_TREE_H
23 
25 
30 #include "BaseTrainer.h"
31 #include "BoostClassifier.h"
32 
35 class SingleTree: public BoostClassifier<HaarTreeClassifier>, public BaseTrainer {
36 
38  typedef std::vector<Classifier> ClassifierCollection;
39 
40  std::vector<Pattern> m_backup;
41  int m_backup_pattern;
42 
43 public:
44  SingleTree()
45  {
46  }
47 
49  template<class F>
50  void InitTrainer(F & f, int preload)
51  {
52  BaseTrainer::InitTrainer(preload);
53 
54  // Initialize m_count
55  TestFeatureGeneration(f, false);
56  }
57 
59  void ShutdownTrainer() { }
60 
62  void Restart() { }
63 
65  template<class FeatureGenerator>
66  bool Train(FeatureGenerator &f, bool preload) {
67 
68  std::cout << "SingleTree" << std::endl;
69 
70  Classifier bestH;
71 
72  // uso il BaseTrainer per ottenere comunque il migliore base Classifier
73  if(!BaseTrainer::Train(bestH.root, f, preload))
74  return false;
75 
76  // ora bisogna massimizzare localmente i corretti e gli sbagliati
77  BaseTrainer::EvaluateAndSet(bestH.root);
78 
79  std::swap(m_backup, m_templates);
80  m_backup_pattern = n_pattern;
81 
82  m_templates.clear();
83  n_pattern = 0;
84 
85  for(int i =0;i<m_backup_pattern;++i)
86  {
87  if( m_backup[i].test == 1 )
88  {
89  m_templates.push_back(m_backup[i]);
90  n_pattern++;
91  }
92  }
93 
94  std::cout << " === 1: " << n_pattern << " === " << std::endl;
95 // InitWeight();
96 
97  BaseTrainer::Train(bestH.pos, f, preload);
98 
99  m_templates.clear();
100  n_pattern = 0;
101 
102  for(int i =0;i<m_backup_pattern;++i)
103  {
104  if( m_backup[i].test == -1 )
105  {
106  m_templates.push_back(m_backup[i]);
107  n_pattern++;
108  }
109  }
110 
111  std::cout << " === -1: " << n_pattern << " === " << std::endl;
112 // InitWeight();
113 
114  BaseTrainer::Train(bestH.neg, f, preload);
115 
116  std::swap(m_templates, m_backup);
117  n_pattern = m_backup_pattern;
118  InitWeight();
119 
120  Post(bestH);
121 
122  // store
123  this->m_weak_classifiers.push_back(bestH);
124 
125  return true;
126  }
127 
129  bool Test();
130 
131 };
132 
133 #endif
Definition: FeatureGenerator.h:36
void ShutdownTrainer()
Frees temporary memory before the next InitTrainer.
Definition: SingleTree.h:59
Definition: BoostClassifier.h:61
bool Test()
Test current network.
Definition: SingleTree.h:35
bool Train(FeatureGenerator &f, bool preload)
Performs a single training cycle.
Definition: SingleTree.h:66
void InitTrainer(F &f, int preload)
Initialize Trainer.
Definition: SingleTree.h:50
A majority-voting classifier.
void Restart()
Reset weight and bootstrap from classifier.
Definition: SingleTree.h:62