# Example 6.6 (przyklad 6.6)
# Gradient boosting (gbm) on the `cpus` data: fit a boosted regression model
# for log10(perf), select the best number of iterations, and plot the
# training/validation error curves plus a partial-dependence plot.
# NOTE(review): the source file had lost all newlines; structure restored
# from syntax. Stray text "tu jest bagging" turned into a comment.

options(OutDec = ",")  # use comma as decimal separator on plots (Polish locale)

library("ElemStatLearn")
library("mlbench")
library("rpart")
library("adabag")  # tu jest bagging (adabag provides the bagging() function)
library("gbm")

data(cpus)

# Drop column 9 (estimated performance) and column 1 (model name),
# then model performance on a log10 scale.
cpus2 <- cpus[, -9]
cpus2 <- cpus2[, -1]
cpus2$perf <- log10(cpus2$perf)

# Hold out one third of the rows as a test set.
m <- nrow(cpus2)
test <- sample(seq_len(m), size = m / 3, replace = FALSE)
cpus2.ucz <- cpus2[-test, ]   # training set ("uczacy")
cpus2.test <- cpus2[test, ]   # test set

lzm <- ncol(cpus2) - 1  # number of predictor variables

# NOTE(review): the model is fit on the full cpus2 using gbm's internal
# train.fraction split (first 2/3 of rows), not on cpus2.ucz — the manual
# split above is unused here; confirm this matches the book's intent.
gbm.cpus <- gbm(perf ~ ., data = cpus2,
                var.monotone = c(-1, 1, 1, 1, 1, 1),
                distribution = "gaussian",
                n.trees = 1000, interaction.depth = 1,
                train.fraction = 2 / 3, cv.folds = 10)

# Best iteration count estimated from the out-of-bag criterion.
best.iter <- gbm.perf(gbm.cpus, method = "OOB")
print(best.iter)

# Best iteration per the held-out (train.fraction) test error, with plot.
cc <- gbm.perf(gbm.cpus, plot.it = TRUE, oobag.curve = FALSE,
               overlay = TRUE, method = "test")

# Fig. 6.5a: training vs. validation MSE as a function of ensemble size.
ks <- seq_len(1000)
a <- gbm.cpus$valid.error
b <- gbm.cpus$train.error
plot(ks, a, type = "n", ylim = c(min(a, b), max(a, b)),
     main = "Zbiór Cpus", ylab = "MSE", xlab = "Liczba modeli bazowych")
lines(ks, a, lty = 1)
lines(ks, b, lty = 2)
legend("topright", lty = c(1, 2),
       legend = c("zbiór testowy", "zbiór uczący"))

# Fig. 6.5b: partial dependence on the first predictor, evaluated at the
# best iteration. Fixed: the original passed best.iter as
# continuous.resolution (the plotting-grid density); the intended argument
# for "use this many trees" in plot.gbm is n.trees.
plot(gbm.cpus, i.var = 1, n.trees = best.iter,
     return.grid = FALSE, main = "Zbiór CPUS")