verbose         = FALSE
)
# export OOB error
hyper_grid_rlf$mse[i] <- mean((predict(fit,newdata =xtrain_valid)-ytrain_valid)^2)
}
# Show the 10 best RLF hyperparameter settings by validation MSE
hyper_grid_rlf %>%
arrange(mse) %>%
head(10)
# Keep the whole grid sorted by validation MSE; row 1 is the best setting
ordered_hyper_grid_rlf <- hyper_grid_rlf[order(hyper_grid_rlf$mse),]
# Refit a random forest on the training split using the best
# hyperparameters from the grid search (row 1 of the MSE-ordered grid).
# NOTE(review): randomForest() has no `verbose` argument — it is silently
# absorbed by `...`; presumably `do.trace` was intended. Confirm.
rf <- randomForest(
formula         = ytrain_train ~ .,
data            = xtrain_train,
ntree       = ordered_hyper_grid_rf$n.trees[1],
sampsize            =ceiling(ordered_hyper_grid_rf$sample.fraction[1]*nrow(xtrain_train))  ,
nodesize    = ordered_hyper_grid_rf$min.node.size[1],
verbose         = FALSE
)
# Test-set MSE of the tuned random forest
mean((predict(rf,newdata =xtest)-ytest)^2)
#
# Refit the Riemann-Lebesgue forest with the tuned ntreesub and Lp
# (row 1 of the MSE-ordered RLF grid); ntree is fixed at 100.
rlf <-RLForest(
formula         = ytrain_train ~ .,
data            = xtrain_train,
ntree=100,
ntreesub=ordered_hyper_grid_rlf$ntreesub[1],
Lp = ordered_hyper_grid_rlf$Lp[1],
replace=FALSE,
verbose         = FALSE
) #RLForest(ytrain_train~.,data=xtrain_train,ntree=100,ntreesub=10,replace=FALSE,Lp=Lps[i])
# Test-set MSE of the tuned RLF
mean((predict(rlf,newdata =xtest)-ytest)^2)
# Re-run of the tuned RF refit above (duplicate from interactive history)
rf <- randomForest(
formula         = ytrain_train ~ .,
data            = xtrain_train,
ntree       = ordered_hyper_grid_rf$n.trees[1],
sampsize            =ceiling(ordered_hyper_grid_rf$sample.fraction[1]*nrow(xtrain_train))  ,
nodesize    = ordered_hyper_grid_rf$min.node.size[1],
verbose         = FALSE
)
# Test-set MSE of the refitted random forest
mean((predict(rf,newdata =xtest)-ytest)^2)
#
# Re-run of the tuned RLF refit above (duplicate from interactive history)
rlf <-RLForest(
formula         = ytrain_train ~ .,
data            = xtrain_train,
ntree=100,
ntreesub=ordered_hyper_grid_rlf$ntreesub[1],
Lp = ordered_hyper_grid_rlf$Lp[1],
replace=FALSE,
verbose         = FALSE
) #RLForest(ytrain_train~.,data=xtrain_train,ntree=100,ntreesub=10,replace=FALSE,Lp=Lps[i])
# Test-set MSE of the refitted RLF
mean((predict(rlf,newdata =xtest)-ytest)^2)
##random forest
#rf <- randomForest(ytrain~.,data=xtrain,ntree=100)
# Test-set predictions and MSE for both fitted models
pred_rf <- predict(rf,newdata =xtest)
mean((pred_rf-ytest)^2)
#elf <- RLForest(ytrain~.,data=as.data.frame(xtrain),ntree=100,ntreesub=10,replace=FALSE,data_driven=TRUE)
pred_rlf <- predict(rlf,newdata =xtest)
mean((pred_rlf-ytest)^2)
# Combine test inputs, predictions and observed y for plotting;
# xtest's columns become x.intercept / x.x inside `result`.
result <- data.frame(x=xtest, y_pred_rf=pred_rf,y_pred_rlf=pred_rlf,sampley=ytest)
ordered_result <- result[order(result$x.x),]
test_acc_sin <- ordered_result#data.frame(Lps=Lps,test_MSE_RLF=mse_rlf_p,test_MSE_RF= rep(mse_rf_p,length(Lps)))
#
#  f6<-ggplot(test_acc_sin,aes(x=x))+
#      geom_line(aes(y = y_pred_rf,colour="rf"),linetype = "solid", size=1) +
#         geom_line(aes(y = y_pred_rlf,colour="rlf"),linetype = "solid", size=1) +
# stat_function(fun=fx,colour = "blue",linetype = "solid", size=2)+
#      scale_color_manual(values=c("rf"="red","rlf"="green"))+
#      theme(legend.position="none")+
#     ylab('y')+
#    theme(panel.grid.major = element_blank(), panel.grid.minor = element_blank(),
# panel.background = element_blank(), axis.line = element_line(colour = "black"),panel.border  = element_rect(color = "black",fill = NA,size = 1))+
#    theme(axis.text=element_text(size=50), legend.text = element_text(size=35))+
#     theme(axis.text.x = element_text(vjust = 0.5, hjust=1,size=40),axis.ticks.length=unit(0.5, "cm"),axis.title.x=element_text(size = 40))+
#  theme(axis.text.y = element_text(vjust = 0.5, hjust=1,size=40),axis.ticks.length=unit(0.5, "cm"),axis.title.y=element_text(size = 30))
# f6
# f7: test sample (black points), RF/RLF fitted curves, and the true
# target fx (blue). Legend key 'samply' is a typo but is used
# consistently between aes() and scale_color_manual(), so it works.
f7<-ggplot(test_acc_sin,aes(x=x.x))+
geom_point(aes(y = sampley,color='samply'),size=2) +
geom_line(aes(y = y_pred_rf,color="rf"), size=1) +
geom_line(aes(y = y_pred_rlf,color="rlf"), size=1) +
stat_function(fun=fx,colour = "blue",linetype = "solid", size=2)+
scale_color_manual(values=c('samply'="black","rf"="red","rlf"="green"))+
theme(legend.position="none")+
xlab('x')+
ylab('y')+
theme(panel.grid.major = element_blank(), panel.grid.minor = element_blank(),
panel.background = element_blank(), axis.line = element_line(colour = "black"),panel.border  = element_rect(color = "black",fill = NA,size = 1))+
theme(axis.text=element_text(size=50), legend.text = element_text(size=35))+
theme(axis.text.x = element_text(vjust = 0.5, hjust=1,size=40),axis.ticks.length=unit(0.5, "cm"),axis.title.x=element_text(size = 40))+
theme(axis.text.y = element_text(vjust = 0.5, hjust=1,size=40),axis.ticks.length=unit(0.5, "cm"),axis.title.y=element_text(size = 30))
f7
# Duplicate re-run of the prediction + f7 figure section above
# (interactive history); only the point size (1 vs 2) and sample color
# (orange vs black) differ.
##random forest
#rf <- randomForest(ytrain~.,data=xtrain,ntree=100)
pred_rf <- predict(rf,newdata =xtest)
mean((pred_rf-ytest)^2)
#elf <- RLForest(ytrain~.,data=as.data.frame(xtrain),ntree=100,ntreesub=10,replace=FALSE,data_driven=TRUE)
pred_rlf <- predict(rlf,newdata =xtest)
mean((pred_rlf-ytest)^2)
result <- data.frame(x=xtest, y_pred_rf=pred_rf,y_pred_rlf=pred_rlf,sampley=ytest)
ordered_result <- result[order(result$x.x),]
test_acc_sin <- ordered_result#data.frame(Lps=Lps,test_MSE_RLF=mse_rlf_p,test_MSE_RF= rep(mse_rf_p,length(Lps)))
#
#  f6<-ggplot(test_acc_sin,aes(x=x))+
#      geom_line(aes(y = y_pred_rf,colour="rf"),linetype = "solid", size=1) +
#         geom_line(aes(y = y_pred_rlf,colour="rlf"),linetype = "solid", size=1) +
# stat_function(fun=fx,colour = "blue",linetype = "solid", size=2)+
#      scale_color_manual(values=c("rf"="red","rlf"="green"))+
#      theme(legend.position="none")+
#     ylab('y')+
#    theme(panel.grid.major = element_blank(), panel.grid.minor = element_blank(),
# panel.background = element_blank(), axis.line = element_line(colour = "black"),panel.border  = element_rect(color = "black",fill = NA,size = 1))+
#    theme(axis.text=element_text(size=50), legend.text = element_text(size=35))+
#     theme(axis.text.x = element_text(vjust = 0.5, hjust=1,size=40),axis.ticks.length=unit(0.5, "cm"),axis.title.x=element_text(size = 40))+
#  theme(axis.text.y = element_text(vjust = 0.5, hjust=1,size=40),axis.ticks.length=unit(0.5, "cm"),axis.title.y=element_text(size = 30))
# f6
f7<-ggplot(test_acc_sin,aes(x=x.x))+
geom_point(aes(y = sampley,color='samply'),size=1) +
geom_line(aes(y = y_pred_rf,color="rf"), size=1) +
geom_line(aes(y = y_pred_rlf,color="rlf"), size=1) +
stat_function(fun=fx,colour = "blue",linetype = "solid", size=2)+
scale_color_manual(values=c('samply'="orange","rf"="red","rlf"="green"))+
theme(legend.position="none")+
xlab('x')+
ylab('y')+
theme(panel.grid.major = element_blank(), panel.grid.minor = element_blank(),
panel.background = element_blank(), axis.line = element_line(colour = "black"),panel.border  = element_rect(color = "black",fill = NA,size = 1))+
theme(axis.text=element_text(size=50), legend.text = element_text(size=35))+
theme(axis.text.x = element_text(vjust = 0.5, hjust=1,size=40),axis.ticks.length=unit(0.5, "cm"),axis.title.x=element_text(size = 40))+
theme(axis.text.y = element_text(vjust = 0.5, hjust=1,size=40),axis.ticks.length=unit(0.5, "cm"),axis.title.y=element_text(size = 30))
f7
library(plotly)
# Simulate a two-component piecewise-linear regression problem:
# with probability 1/2 each, y = 5x + e or y = 10 + 5x + e, e ~ N(0, 1).
xSeq <- seq(-2, 2, 0.01)
set.seed(42)
n1 <- 3000
x <- rnorm(n1)
# Component label for each observation (1 or 2, equally likely)
p <- sample(c(1, 2), size = n1, prob = c(1/2, 1/2), replace = TRUE)
# Vectorized response. Drawing rnorm(n1) here consumes exactly the same
# RNG stream as the original element-wise loop of rnorm(1) calls, so the
# simulated values are bit-identical while avoiding growing `y` with
# per-iteration assignment.
error <- rnorm(n1)
y <- 5 * x + 10 * (p == 2) + error
data <- data.frame(intercept = 1, x = x, y = y)
#train_index <- sample(1:nrow(data),size = 2500)
# 80/20 train/test split, stratified on y via caret
train_index <- createDataPartition(data$y, p =0.8,
list = FALSE,
times = 1)
xtrain <- data[train_index,1:2]
ytrain <- data[train_index,3]
# Further split the training set 75/25 into train/validation
validation_index<- createDataPartition(ytrain,
p = .75,
list = FALSE,
times = 1)
xtrain_train <- xtrain[validation_index,1:2]
ytrain_train <- ytrain[validation_index]
xtrain_valid <- xtrain[-validation_index,1:2]
ytrain_valid <- ytrain[-validation_index]
xtest <- data[-train_index,1:2]
ytest <- data[-train_index,3]
# Frame for plotting the raw test sample against an even x grid.
# NOTE(review): this relies on the test split having exactly 600 rows so
# `xeq` is not recycled/errors — confirm createDataPartition yields 600.
result <- data.frame(x=xtest,sampley=ytest,xeq= seq(-2, 2, length=600))
ordered_result <- result[order(result[,2]),]
# True regression line for mixture component 1: f(x) = 5x
fx1 <- function(x) {
  x * 5
}
# True regression line for mixture component 2: f(x) = 5x + 10
fx2 <- function(x) {
  10 + x * 5
}
# f9: raw test sample over the two true regression lines fx1/fx2.
# The point layer supplies its own x aesthetic (x.x, the actual test x),
# overriding the base aes(x = xeq) used by the stat_function layers.
f9<-ggplot(result,aes(x=xeq))+
xlab('x')+
stat_function(fun=fx1,colour = "blue",linetype = "solid", size=2)+
stat_function(fun=fx2,colour = "blue",linetype = "solid", size=2)+
scale_color_manual(values=c('samply'="orange","rf"="red","rlf"="green"))+
geom_point(aes(x=x.x,y = sampley,color='samply'),size=2) +
theme(legend.position="none")+
ylab('y')+
theme(panel.grid.major = element_blank(), panel.grid.minor = element_blank(),
panel.background = element_blank(), axis.line = element_line(colour = "black"),panel.border  = element_rect(color = "black",fill = NA,size = 1))+
theme(axis.text=element_text(size=50), legend.text = element_text(size=35))+
theme(axis.text.x = element_text(vjust = 0.5, hjust=1,size=40),axis.ticks.length=unit(0.5, "cm"),axis.title.x=element_text(size = 40))+
theme(axis.text.y = element_text(vjust = 0.5, hjust=1,size=40),axis.ticks.length=unit(0.5, "cm"),axis.title.y=element_text(size = 30))
f9
set.seed(42)
# RF hyperparameter grid: subsample fraction, minimum node size, tree count
hyper_grid_rf <- expand.grid(
sample.fraction = c(.5, .63, .8),
min.node.size = c( 5, 10,15),
n.trees = c(50,100,150,200),
mse=NA
)
# execute full cartesian grid search
for(i in seq_len(nrow(hyper_grid_rf))) {
# fit model for ith hyperparameter combination
fit <- randomForest(
formula         = ytrain_train ~ .,
data            = xtrain_train,
ntree       = hyper_grid_rf$n.trees[i],
sampsize            =ceiling(hyper_grid_rf$sample.fraction[i]*nrow(xtrain_train))  ,
nodesize    = hyper_grid_rf$min.node.size[i],
verbose         = FALSE
)
# record MSE on the held-out validation split (not OOB error)
hyper_grid_rf$mse[i] <- mean((predict(fit,newdata =xtrain_valid)-ytrain_valid)^2)
}
# show the 10 best settings, then keep the grid sorted by validation MSE
hyper_grid_rf %>%
arrange(mse) %>%
head(10)
ordered_hyper_grid_rf <- hyper_grid_rf[order(hyper_grid_rf$mse),]
# RLF hyperparameter grid: Lp order and subsampled-trees count
hyper_grid_rlf <- expand.grid(
Lp=c(0.2,0.4,0.6,0.8),
ntreesub=c(10,20,50),
mse=NA
)
# execute full cartesian grid search
for(i in seq_len(nrow(hyper_grid_rlf))) {
# fit model for ith hyperparameter combination
fit <- RLForest(
formula         = ytrain_train ~ .,
data            = xtrain_train,
ntree       = 100,
replace         = FALSE,
ntreesub=hyper_grid_rlf$ntreesub[i],
Lp = hyper_grid_rlf$Lp[i],
verbose         = FALSE
)
# record MSE on the held-out validation split (not OOB error)
hyper_grid_rlf$mse[i] <- mean((predict(fit,newdata =xtrain_valid)-ytrain_valid)^2)
}
# show the 10 best settings, then keep the grid sorted by validation MSE
hyper_grid_rlf %>%
arrange(mse) %>%
head(10)
ordered_hyper_grid_rlf <- hyper_grid_rlf[order(hyper_grid_rlf$mse),]
# Refit the random forest with the best tuned hyperparameters
rf <- randomForest(
formula         = ytrain_train ~ .,
data            = xtrain_train,
ntree       = ordered_hyper_grid_rf$n.trees[1],
sampsize            =ceiling(ordered_hyper_grid_rf$sample.fraction[1]*nrow(xtrain_train))  ,
nodesize    = ordered_hyper_grid_rf$min.node.size[1],
verbose         = FALSE
)
# Test-set MSE of the tuned random forest
mean((predict(rf,newdata =xtest)-ytest)^2)
#
# Refit the Riemann-Lebesgue forest with the tuned hyperparameters.
# BUG FIX: Lp previously came from the *unordered* grid
# (hyper_grid_rlf$Lp[1] — always the first candidate, 0.2) instead of the
# best setting by validation MSE; use the MSE-ordered grid, consistent
# with ntreesub here and with every other tuned refit in this script.
rlf <-RLForest(
formula         = ytrain_train ~ .,
data            = xtrain_train,
ntree=100,
ntreesub=ordered_hyper_grid_rlf$ntreesub[1],
Lp = ordered_hyper_grid_rlf$Lp[1],
replace=FALSE,
verbose         = FALSE
) #RLForest(ytrain_train~.,data=xtrain_train,ntree=100,ntreesub=10,replace=FALSE,Lp=Lps[i])
# Test-set MSE of the tuned RLF
mean((predict(rlf,newdata = xtest)-ytest)^2)
##random forest
#rf <- randomForest(ytrain~.,data=xtrain,ntree=100)
# Test-set predictions and MSE for both tuned models
pred_rf <- predict(rf,newdata =xtest)
mean((pred_rf-ytest)^2)
#elf <- RLForest(ytrain~.,data=as.data.frame(xtrain),ntree=100,ntreesub=10,replace=FALSE,data_driven=TRUE)
pred_rlf <- predict(rlf,newdata =xtest)
mean((pred_rlf-ytest)^2)
# Combine test inputs, predictions and observed y for plotting;
# xtest's columns become x.intercept / x.x inside `result`.
result <- data.frame(x=xtest, y_pred_rf=pred_rf,y_pred_rlf=pred_rlf,sampley=ytest)
ordered_result <- result[order(result$x.x),]
test_acc_sin <- ordered_result#data.frame(Lps=Lps,test_MSE_RLF=mse_rlf_p,test_MSE_RF= rep(mse_rf_p,length(Lps)))
# f8: test sample, RF/RLF fitted curves, and the two true regression
# lines fx1/fx2 for the piecewise-linear data.
# BUG FIX: removed the stray stat_function(fun = fx, ...) layer — fx is
# the target function from the earlier (sine) experiment and is not
# defined in this piecewise-linear section (only fx1/fx2 are); the
# corrected duplicate of this figure below also omits it.
f8<-ggplot(test_acc_sin,aes(x=x.x))+
stat_function(fun=fx1,colour = "blue",linetype = "solid", size=2)+
stat_function(fun=fx2,colour = "blue",linetype = "solid", size=2)+
geom_point(aes(y = sampley,color='samply'),size=1) +
geom_line(aes(y = y_pred_rf,color="rf"), size=1) +
geom_line(aes(y = y_pred_rlf,color="rlf"), size=1) +
scale_color_manual(values=c('samply'="orange","rf"="red","rlf"="green"))+
theme(legend.position="none")+
xlab('x')+
ylab('y')+
theme(panel.grid.major = element_blank(), panel.grid.minor = element_blank(),
panel.background = element_blank(), axis.line = element_line(colour = "black"),panel.border  = element_rect(color = "black",fill = NA,size = 1))+
theme(axis.text=element_text(size=50), legend.text = element_text(size=35))+
theme(axis.text.x = element_text(vjust = 0.5, hjust=1,size=40),axis.ticks.length=unit(0.5, "cm"),axis.title.x=element_text(size = 40))+
theme(axis.text.y = element_text(vjust = 0.5, hjust=1,size=40),axis.ticks.length=unit(0.5, "cm"),axis.title.y=element_text(size = 30))
f8
# Duplicate re-run of the prediction + f8 figure section (interactive
# history); this version correctly omits the sine-example fx layer.
##random forest
#rf <- randomForest(ytrain~.,data=xtrain,ntree=100)
pred_rf <- predict(rf,newdata =xtest)
mean((pred_rf-ytest)^2)
#elf <- RLForest(ytrain~.,data=as.data.frame(xtrain),ntree=100,ntreesub=10,replace=FALSE,data_driven=TRUE)
pred_rlf <- predict(rlf,newdata =xtest)
mean((pred_rlf-ytest)^2)
result <- data.frame(x=xtest, y_pred_rf=pred_rf,y_pred_rlf=pred_rlf,sampley=ytest)
ordered_result <- result[order(result$x.x),]
test_acc_sin <- ordered_result#data.frame(Lps=Lps,test_MSE_RLF=mse_rlf_p,test_MSE_RF= rep(mse_rf_p,length(Lps)))
f8<-ggplot(test_acc_sin,aes(x=x.x))+
stat_function(fun=fx1,colour = "blue",linetype = "solid", size=2)+
stat_function(fun=fx2,colour = "blue",linetype = "solid", size=2)+
geom_point(aes(y = sampley,color='samply'),size=1) +
geom_line(aes(y = y_pred_rf,color="rf"), size=1) +
geom_line(aes(y = y_pred_rlf,color="rlf"), size=1) +
scale_color_manual(values=c('samply'="orange","rf"="red","rlf"="green"))+
theme(legend.position="none")+
xlab('x')+
ylab('y')+
theme(panel.grid.major = element_blank(), panel.grid.minor = element_blank(),
panel.background = element_blank(), axis.line = element_line(colour = "black"),panel.border  = element_rect(color = "black",fill = NA,size = 1))+
theme(axis.text=element_text(size=50), legend.text = element_text(size=35))+
theme(axis.text.x = element_text(vjust = 0.5, hjust=1,size=40),axis.ticks.length=unit(0.5, "cm"),axis.title.x=element_text(size = 40))+
theme(axis.text.y = element_text(vjust = 0.5, hjust=1,size=40),axis.ticks.length=unit(0.5, "cm"),axis.title.y=element_text(size = 30))
f8
# inspect the tuned RLF grid from the last search
ordered_hyper_grid_rlf
# ---- Superconductivity benchmark: tuned RF vs RLF, k-fold CV ----
library(foreign)
library(RiemannLebesgueForest)
library(randomForest)
library(MASS)
library(caret)
#clean up memory
# NOTE(review): rm(list=ls()) wipes the whole workspace — acceptable in
# this interactive history, but avoid in reusable scripts.
rm(list=ls())
gc()
set.seed(42)
# Load the superconductivity data; column 82 (critical_temp) is the response
superconductivity <- read.arff('superconductivity.arff')
# shuffle rows before assigning folds
df<-superconductivity[sample(nrow(superconductivity)),]
folds <- createFolds(df$critical_temp, k = 10,list = FALSE)#cut(seq(1,nrow(df)),breaks=10,labels=FALSE)
#folds <- cut(seq(1,nrow(df)),breaks=10,labels=FALSE)
# per-fold test-MSE accumulators
mse_rf <- c()
mse_rlf <- c()
# 10-fold CV comparing tuned RF vs tuned RLF on the superconductivity data.
# BUG FIXES vs the original draft of this loop:
#  * the outer fold loop used `i`, which the inner grid-search loops
#    immediately clobbered, and fold results were stored into mse_*[k]
#    with `k` undefined (an error) — the outer index is now `k`, matching
#    the corrected re-run of this loop later in the file, with `i`
#    reserved for the inner grid-search loops;
#  * xtrain_valid was rm()'d twice while ytrain_valid was never freed.
for(k in 1:10){
testIndexes <- which(folds==k,arr.ind=TRUE)
xtest<- df[testIndexes, -82]
#xtest<- xtest[,-2]
ytest <- df[testIndexes,82]
xtrain <- df[-testIndexes, -82]
#xtrain <- xtrain[,-2]
ytrain <- df[-testIndexes,82]
# hold out 25% of the training fold for hyperparameter validation
validation_index<- createDataPartition(ytrain,
p = 0.25,
list = FALSE,
times = 1)
xtrain_train <- xtrain[-validation_index,]
ytrain_train <- ytrain[-validation_index]
xtrain_valid <- xtrain[validation_index,]
ytrain_valid <- ytrain[validation_index]
# RLF grid: tune Lp only (ntree/ntreesub fixed)
hyper_grid_rlf <- expand.grid(
Lp=c(0.4,0.6,0.8),
mse=NA
)
for(i in seq_len(nrow(hyper_grid_rlf))) {
# fit model for ith hyperparameter combination
fit <- RLForest(
formula         = ytrain_train ~ .,
data            = xtrain_train,
ntree       = 100,
replace         = FALSE,
ntreesub=10,
Lp = hyper_grid_rlf$Lp[i],
verbose         = FALSE
)
# record MSE on the held-out validation split
hyper_grid_rlf$mse[i] <- mean((predict(fit,newdata =xtrain_valid)-ytrain_valid)^2)
}
ordered_hyper_grid_rlf <- hyper_grid_rlf[order(hyper_grid_rlf$mse),]
# RF grid: tune node size and number of trees
hyper_grid_rf <- expand.grid(
min.node.size = c( 5, 10,15),
n.trees = c(50,100,150,200),
mse=NA
)
# execute full cartesian grid search
for(i in seq_len(nrow(hyper_grid_rf))) {
# fit model for ith hyperparameter combination
fit <- randomForest(
formula         = ytrain_train ~ .,
data            = xtrain_train,
ntree       = hyper_grid_rf$n.trees[i],
nodesize    = hyper_grid_rf$min.node.size[i],
verbose         = FALSE
)
# record MSE on the held-out validation split
hyper_grid_rf$mse[i] <- mean((predict(fit,newdata =xtrain_valid)-ytrain_valid)^2)
}
ordered_hyper_grid_rf <- hyper_grid_rf[order(hyper_grid_rf$mse),]
# refit RF with the best hyperparameters and score on the test fold
rf <- randomForest(
formula         = ytrain_train ~ .,
data            = xtrain_train,
ntree       = ordered_hyper_grid_rf$n.trees[1],
nodesize    = ordered_hyper_grid_rf$min.node.size[1],
verbose         = FALSE
)
mse_rf[k] <-mean((predict(rf,newdata =xtest)-ytest)^2)
#
# refit RLF with the best Lp and score on the test fold
rlf <-RLForest(
formula         = ytrain_train ~ .,
data            = xtrain_train,
ntree=100,
ntreesub=10,
Lp = ordered_hyper_grid_rlf$Lp[1],
replace=FALSE,
verbose         = FALSE
) #RLForest(ytrain_train~.,data=xtrain_train,ntree=100,ntreesub=10,replace=FALSE,Lp=Lps[i])
mse_rlf[k]<- mean((predict(rlf,newdata = xtest)-ytest)^2)
# free per-fold objects before the next iteration
rm(rf)
rm(rlf)
rm(xtest)
rm(ytest)
rm(xtrain)
rm(ytrain)
rm(xtrain_train)
rm(ytrain_train)
rm(xtrain_valid)
rm(ytrain_valid)
gc()
}
k
mse_rlf
# ---- Second run of the superconductivity benchmark (5 folds) ----
library(foreign)
library(RiemannLebesgueForest)
library(randomForest)
library(MASS)
library(caret)
#clean up memory
# NOTE(review): rm(list=ls()) wipes the whole workspace — acceptable in
# this interactive history, but avoid in reusable scripts.
rm(list=ls())
gc()
set.seed(42)
# Reload the data; column 82 (critical_temp) is the response
superconductivity <- read.arff('superconductivity.arff')
# shuffle rows before assigning folds
df<-superconductivity[sample(nrow(superconductivity)),]
folds <- createFolds(df$critical_temp, k = 10,list = FALSE)#cut(seq(1,nrow(df)),breaks=10,labels=FALSE)
#folds <- cut(seq(1,nrow(df)),breaks=10,labels=FALSE)
# per-fold test-MSE accumulators
mse_rf <- c()
mse_rlf <- c()
# CV over the first 5 of the 10 folds, comparing tuned RF vs tuned RLF.
# BUG FIX: the cleanup section rm()'d xtrain_valid twice and never freed
# ytrain_valid; the second call now removes ytrain_valid.
for(k in 1:5){
testIndexes <- which(folds==k,arr.ind=TRUE)
xtest<- df[testIndexes, -82]
#xtest<- xtest[,-2]
ytest <- df[testIndexes,82]
xtrain <- df[-testIndexes, -82]
#xtrain <- xtrain[,-2]
ytrain <- df[-testIndexes,82]
# hold out 25% of the training fold for hyperparameter validation
validation_index<- createDataPartition(ytrain,
p = 0.25,
list = FALSE,
times = 1)
xtrain_train <- xtrain[-validation_index,]
ytrain_train <- ytrain[-validation_index]
xtrain_valid <- xtrain[validation_index,]
ytrain_valid <- ytrain[validation_index]
# RLF grid: tune Lp only (ntree/ntreesub fixed)
hyper_grid_rlf <- expand.grid(
Lp=c(0.4,0.6,0.8),
mse=NA
)
for(i in seq_len(nrow(hyper_grid_rlf))) {
# fit model for ith hyperparameter combination
fit <- RLForest(
formula         = ytrain_train ~ .,
data            = xtrain_train,
ntree       = 100,
replace         = FALSE,
ntreesub=10,
Lp = hyper_grid_rlf$Lp[i],
verbose         = FALSE
)
# record MSE on the held-out validation split
hyper_grid_rlf$mse[i] <- mean((predict(fit,newdata =xtrain_valid)-ytrain_valid)^2)
}
ordered_hyper_grid_rlf <- hyper_grid_rlf[order(hyper_grid_rlf$mse),]
# RF grid: tune node size and number of trees
hyper_grid_rf <- expand.grid(
min.node.size = c( 5, 10,15),
n.trees = c(50,100,150,200),
mse=NA
)
# execute full cartesian grid search
for(i in seq_len(nrow(hyper_grid_rf))) {
# fit model for ith hyperparameter combination
fit <- randomForest(
formula         = ytrain_train ~ .,
data            = xtrain_train,
ntree       = hyper_grid_rf$n.trees[i],
nodesize    = hyper_grid_rf$min.node.size[i],
verbose         = FALSE
)
# record MSE on the held-out validation split
hyper_grid_rf$mse[i] <- mean((predict(fit,newdata =xtrain_valid)-ytrain_valid)^2)
}
ordered_hyper_grid_rf <- hyper_grid_rf[order(hyper_grid_rf$mse),]
# refit RF with the best hyperparameters and score on the test fold
rf <- randomForest(
formula         = ytrain_train ~ .,
data            = xtrain_train,
ntree       = ordered_hyper_grid_rf$n.trees[1],
nodesize    = ordered_hyper_grid_rf$min.node.size[1],
verbose         = FALSE
)
mse_rf[k] <-mean((predict(rf,newdata =xtest)-ytest)^2)
#
# refit RLF with the best Lp and score on the test fold
rlf <-RLForest(
formula         = ytrain_train ~ .,
data            = xtrain_train,
ntree=100,
ntreesub=10,
Lp = ordered_hyper_grid_rlf$Lp[1],
replace=FALSE,
verbose         = FALSE
) #RLForest(ytrain_train~.,data=xtrain_train,ntree=100,ntreesub=10,replace=FALSE,Lp=Lps[i])
mse_rlf[k]<- mean((predict(rlf,newdata = xtest)-ytest)^2)
# free per-fold objects before the next iteration
rm(rf)
rm(rlf)
rm(xtest)
rm(ytest)
rm(xtrain)
rm(ytrain)
rm(xtrain_train)
rm(ytrain_train)
rm(xtrain_valid)
rm(ytrain_valid)
gc()
}
# Summarize the 5-fold results: mean MSE and 95% t-based margin of error
# (df = 4 because 5 folds were run)
cat('\nMSE of RF:',mean(mse_rf))
cat('\nMSE of LF:',mean(mse_rlf))
cat('\n margin of error of RF:',qt(0.975,4)* sd(mse_rf)/sqrt(5)  )
cat('\n margin of error RLF:',qt(0.975,4)* sd(mse_rlf)/sqrt(5) )
# Persist per-fold MSEs for later aggregation/plotting
rlf_superconductivitytune <- data.frame(lf=mse_rlf,rf=mse_rf)
write.csv(rlf_superconductivitytune,'rlf_superconductivitytune.csv')
