#This is a continuation of the classification code, now using trees, random forests,
#logistic regression, and support vector classifiers
#Here again is an initialization that will allow our results to be reproducible when rerun
# Fix the RNG seed so repeated runs reproduce the same results
set.seed(1)
# Prior probabilities of the two classes: class 0 has mass g0, class 1 the rest.
# f0 is the uniform density on the unit square; f1(x1, x2) = x1 + x2.
g0 <- 0.5
g1 <- 1 - g0
# Class-0 density f0: uniform on the unit square, so constant 1 everywhere.
# x1 and x2 are accepted (and ignored) to keep the signature parallel to density1.
density0 <- function(x1, x2) {
  1
}
# Class-1 density f1(x1, x2) = x1 + x2 on the unit square
# (its integral over [0,1]^2 is 1, so it is a proper density).
density1 <- function(x1, x2) {
  x1 + x2
}
#Here again is the Bayes/optimal classifier
# Bayes-optimal classifier: label 1 when the class-1 posterior weight
# g1 * density1(x1, x2) exceeds the class-0 weight g0 * density0(x1, x2),
# otherwise label 0. Reads g0, g1, density0, density1 from the enclosing script.
Bayesclass <- function(x1, x2) {
  if (g1 * density1(x1, x2) > g0 * density0(x1, x2)) 1 else 0
}
# Number of grid values for each of x1 and x2 used when plotting
h <- 51
# Grid coordinates covering the unit square: X1 varies fastest, X2 slowest,
# each taking the h equally spaced values 0, 1/(h-1), ..., 1.
# (Replaces the fragile seq(1:h) idiom with an explicit 0:(h-1) sequence.)
X1 <- rep(0:(h - 1), times = h) / (h - 1)
X2 <- rep(0:(h - 1), each = h) / (h - 1)
# Evaluate the Bayes classifier at every grid point.
# mapply vectorizes the scalar Bayesclass over the paired coordinates,
# replacing the original element-by-element for loop.
Class <- mapply(Bayesclass, X1, X2)
# Plot the classifier's value at each grid point.
# NOTE(review): col = Class draws class-0 points with colour 0 (the device
# background), so only class-1 points are visible; col = Class + 1 would show
# both classes -- confirm the intended appearance before changing.
plot(X1, X2, pch = 20, col = Class, cex = 2, lwd = 1)
# Rejection sampler: draws one (x1, x2) pair from density1 on the unit square
# using proposal pairs that are uniform on (0,1)^2.
# Adapted from the code at http://glau.ca/?p=227
# A must be an upper bound on density1 over the unit square; a candidate is
# accepted with probability density1(x1, x2) / A, otherwise we try again.
rejsamp <- function(A) {
  repeat {
    # Three independent uniforms: two proposal coordinates plus one
    # acceptance draw
    u <- runif(3)
    # Keep the candidate when it clears the acceptance test; else retry
    if (A * u[3] < density1(u[1], u[2])) {
      return(u[1:2])
    }
  }
}
#Here is code for making a training sample of N observations from g0*density0 + g1*density1
N <- 400
# Preallocate an N x 3 matrix of zeros for the training sample
# (presumably the columns are x1, x2, and the class label -- confirm
# against the sampling code that fills it below)
train <- matrix(0, nrow = N, ncol = 3)
observ = function (A) {
u = runif(3)
if(u[1]