@misc{oai:ir.soken.ac.jp:00000773, author = {KAWAKITA, Masanori}, month = {2}, note = {The main objective of this thesis is to study boosting methods in statistical classification. Several ensemble learning methods, including boosting, have attracted considerable research interest over the last decade. In particular, boosting methods have been reported to perform well in many practical classification problems. A boosting algorithm constructs an accurate classifier by combining several base classifiers, each of which is often only slightly more accurate than random guessing. Although boosting methods have been studied by many researchers, some aspects of their success remain mysterious, and more intensive theoretical study is required to clarify them.
 We give a survey of several ensemble learning methods. We set up the statistical classification problem and introduce the notation needed to develop the discussion from a learning-theoretic viewpoint. Theoretical preliminaries for analyzing the performance of classification methods are also reviewed. We then survey existing ensemble learning methods and, in particular, review theoretical properties of boosting methods that have been clarified by several researchers.
 The application of AdaBoost with decision stumps to shark bycatch data from the Eastern Pacific Ocean tuna purse-seine fishery is then described. Generalized additive models (GAMs) are among the most widely used tools for analyzing fisheries data, and it is well known that AdaBoost is closely connected to logistic GAMs when appropriate base classifiers are used. We compared the results of AdaBoost with those obtained from GAMs. The prediction performance of AdaBoost was more stable than that of the logistic GAM, even with correlated features; standard deviations of the test error were often considerably smaller for AdaBoost. AdaBoost score plots, graphical displays of the contribution of each feature to the discriminant function, were likewise more stable than the score plots of the logistic GAM, particularly in regions of sparse data. AsymBoost, a variant of AdaBoost developed for binary classification with a skewed response variable, was also shown to be effective at reducing the false-negative rate without substantially increasing the overall test error. Boosting with decision stumps, however, may fail to capture complicated structures in general, since decision stumps are very simple classifiers. Using more complex base classifiers may improve the approximation ability of boosting, but several studies have pointed out that it may also increase the generalization error. Moreover, it is difficult to determine which types of base classifiers suit a given problem without prior knowledge.
 To overcome these difficulties, we propose a new method, local boosting, a localized version of boosting based on an idea similar to, but not the same as, the local likelihood. A direct application of the local likelihood may improve the approximation ability considerably, but it also increases the computational cost so much that the algorithm becomes infeasible; local boosting therefore includes a simple device that keeps the computation feasible. We show that local boosting achieves Bayes-risk consistency in the framework of PAC learning. The estimation error increases, relative to ordinary boosting with simple base classifiers, both when ordinary boosting uses more complex base classifiers and when local boosting is used; the increase caused by local boosting, however, is not large. When the same base classifiers are used, local boosting attains Bayes-risk consistency in a wider range of situations than ordinary boosting by controlling the trade-off between estimation error and approximation error. Several simulations confirm the theoretical results and the effectiveness of local boosting over ordinary boosting in both binary and multiclass classification. SOKENDAI doctoral dissertation, 総研大甲第947号 (Kō No. 947).}, title = {Boosting method for local learning in statistical classification}, year = {2016} }
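
As a concrete illustration of the core algorithm discussed in the abstract, here is a minimal sketch of discrete AdaBoost with one-feature decision stumps in Python/NumPy. It is a generic textbook implementation, not code from the thesis; all function names are illustrative.

import numpy as np

def fit_stump(X, y, w):
    """Find the decision stump (feature, threshold, sign) that minimizes
    weighted 0-1 error. Labels y are in {-1, +1}; weights w sum to 1."""
    n, d = X.shape
    best = (np.inf, 0, 0.0, 1)          # (error, feature, threshold, sign)
    for j in range(d):
        for thr in np.unique(X[:, j]):
            for sign in (1, -1):
                pred = sign * np.where(X[:, j] <= thr, 1, -1)
                err = w[pred != y].sum()
                if err < best[0]:
                    best = (err, j, thr, sign)
    return best

def fit_adaboost(X, y, n_rounds=50):
    """Standard discrete AdaBoost: reweight examples after each round and
    combine stumps with weights alpha_t = 0.5 * log((1 - err) / err)."""
    n = len(y)
    w = np.full(n, 1.0 / n)
    ensemble = []
    for _ in range(n_rounds):
        err, j, thr, sign = fit_stump(X, y, w)
        err = np.clip(err, 1e-10, 1 - 1e-10)
        alpha = 0.5 * np.log((1 - err) / err)
        pred = sign * np.where(X[:, j] <= thr, 1, -1)
        w *= np.exp(-alpha * y * pred)   # upweight misclassified examples
        w /= w.sum()
        ensemble.append((alpha, j, thr, sign))
    return ensemble

def predict(ensemble, X):
    """Sign of the weighted vote F(x) = sum_t alpha_t h_t(x)."""
    F = sum(a * s * np.where(X[:, j] <= t, 1, -1) for a, j, t, s in ensemble)
    return np.sign(F)

The score plot for a feature j, as the abstract defines it, can then be read off as the partial sum of the alpha_t * h_t terms whose stump splits on feature j, analogous to a component function of a GAM.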
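
The connection to logistic GAMs mentioned in the abstract is the classical observation of Friedman, Hastie and Tibshirani (2000): AdaBoost stagewise-minimizes the exponential loss, whose population minimizer is half the logit,

    F^*(x) = \operatorname*{arg\,min}_{F} \mathbb{E}\left[ e^{-Y F(x)} \mid X = x \right]
           = \frac{1}{2} \log \frac{P(Y = +1 \mid x)}{P(Y = -1 \mid x)},

and when every base classifier h_t depends on a single coordinate, as a decision stump does, the fitted discriminant collects into additive components,

    F(x) = \sum_{t} \alpha_t h_t(x) = \sum_{j} f_j(x_j),

which is, up to the factor 1/2, the linear predictor of a logistic GAM.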
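
The abstract does not spell out the local boosting algorithm itself, so no attempt is made to reproduce it here. The sketch below only illustrates the naive local-likelihood-style localization that the abstract says is computationally infeasible: one ensemble is refit per query point, with training examples kernel-weighted (here simply thresholded) by distance to the query. The Gaussian kernel, bandwidth h, and cutoff are assumptions for illustration; the sketch reuses fit_adaboost and predict from above.

def predict_localized(X_train, y_train, X_test, h=0.5, n_rounds=20):
    """Naive localization in the spirit of the local likelihood (NOT the
    thesis's local boosting): for each query point, fit an ensemble on
    the training examples that a Gaussian kernel of bandwidth h deems
    nearby. Refitting per query is exactly the cost blow-up that local
    boosting is said to avoid with a simpler device."""
    out = np.empty(len(X_test))
    for i, x0 in enumerate(X_test):
        k = np.exp(-np.sum((X_train - x0) ** 2, axis=1) / (2 * h ** 2))
        keep = k > 1e-3                  # drop far-away, near-zero-weight points
        if keep.sum() < 10:              # fall back to all data if the
            keep[:] = True               # neighborhood is too sparse
        ens = fit_adaboost(X_train[keep], y_train[keep], n_rounds)
        out[i] = predict(ens, x0[None, :])[0]
    return out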