## Model: Y = f_true(X) + epsilon with E[epsilon] = 0 and Var[epsilon] = sigma^2
## Three example setups follow; since they are assigned sequentially, the LAST
## one (Example 3) is the active setup when the script is run top to bottom.
## Comment/uncomment to switch examples.

## Example 1 -- f_true is not linear + large variance
f_true <- function(x) {
  x^2 + 5 * cos(x^2)
}
sigma <- 10

## Example 2 -- f_true is linear
f_true <- function(x) {
  x
}
sigma <- 2

## Example 3 -- f_true is not linear + small variance (active example)
f_true <- function(x) {
  x^2 + 5 * cos(x^2)
}
sigma <- 1

set.seed(5)  # so that the simulation results are reproducible

## Training sample of size n drawn from the model
n <- 100
x_train <- runif(n, 1, 4)
eps_train <- rnorm(n, 0, sigma)
y_train <- f_true(x_train) + eps_train

plot(x_train, y_train, type = "p", xlab = "x", ylab = "y")

## Overlay the true regression function on a fine grid
t <- seq(1, 4, by = 0.01)
points(t, f_true(t), type = "l")

## Linear model fit
lin_fit <- lm(y_train ~ x_train)
abline(lin_fit, col = "blue")

library(FNN)

## kNN fit; in Example 1 this is the k attaining the smallest test error
K1 <- 18
knn_fit <- knn.reg(train = x_train, test = cbind(t), y = y_train, k = K1)
points(t, knn_fit$pred, type = "l", col = "red")

## kNN fit with a very small k -> overfitting in Examples 1 and 2!
### try other k's as well!
K2 <- 2
knn_fit <- knn.reg(train = x_train, test = cbind(t), y = y_train, k = K2)
points(t, knn_fit$pred, type = "l", col = "darkred")

## Add a legend to the plot.
## Fixed: the original passed 5 col/lty entries for 4 legend labels
## (a stray trailing "red" / fifth lty); each label now has exactly one
## colour and one line type.
legend(1, max(y_train),
       legend = c("Prava f", "Lin. model",
                  paste0("kNN (k=", K1, ")"),
                  paste0("kNN (k=", K2, ")")),
       col = c(1, "blue", "red", "darkred"),
       lty = c(1, 1, 1, 1))

##### Error on the training set ("training error")

train_error_lin <- mean(lin_fit$residuals^2)

## kNN training error for every k = 1, ..., n-1 (prediction on the
## training points themselves)
train_error_knn <- numeric(n - 1)
for (K in seq_len(n - 1)) {
  knn_fit <- knn.reg(train = x_train, test = cbind(x_train),
                     y = y_train, k = K)
  train_error_knn[K] <- mean((y_train - knn_fit$pred)^2)
}

##### Test error

N <- 10000
x_test <- runif(N, 1, 4)
eps_test <- rnorm(N, 0, sigma)
y_test <- f_true(x_test) + eps_test
## in practice we cannot simulate a test set because we do not know
## the distribution of (X, Y)

## newdata column must be named like the regressor in the lm formula
y_lin_test <- predict(lin_fit, newdata = data.frame(x_train = x_test))
test_error_lin <- mean((y_test - y_lin_test)^2)

## kNN test error for every k = 1, ..., n-1
test_error_knn <- numeric(n - 1)
for (K in seq_len(n - 1)) {
  knn_fit <- knn.reg(train = x_train, test = cbind(x_test),
                     y = y_train, k = K)
  test_error_knn[K] <- mean((y_test - knn_fit$pred)^2)
}

## Plot the errors against log(1/k), a measure of kNN flexibility:
## small k = flexible model (right end), large k = rigid model (left end)
M <- max(test_error_knn)
m <- 0
Kna_min_1 <- log(((n - 1):1)^(-1))  # log(1/k) for k = n-1, ..., 1
plot(Kna_min_1, rev(test_error_knn), type = "l", ylim = c(m, M),
     col = "orange",
     xlab = "log(1/k) (fleksibilnost kNN modela)", ylab = "")

## Mark the k with the smallest test error (x position is log(1/k) = -log(k))
knn_min <- which.min(test_error_knn)
points(-log(knn_min), min(test_error_knn), pch = 15, col = "orange")

## Test error of the linear model
abline(h = test_error_lin, col = "orange", lty = 2)

## Irreducible error (Var[epsilon] = sigma^2)
abline(h = sigma^2, col = "red", lty = 4)

### Also add the training errors to the plot
points(Kna_min_1, rev(train_error_knn), type = "l", col = "blue")
abline(h = train_error_lin, col = "blue", lty = 2)

## Add a legend to the error plot
legend(Kna_min_1[30], M,
       legend = c("Test error (kNN)", "Test error (Lin. model)",
                  "Training error (kNN)", "Training error (Lin. model)",
                  "Irreducible error"),
       col = c("orange", "orange", "blue", "blue", "red"),
       lty = c(1, 2, 1, 2, 4))