# Matma_AI_cyber/Projekt_2/projekt.R
#
# (The lines below were artifacts of the web viewer this file was copied from:
#  "69 lines", "1.6 KiB", "R", "Raw Permalink Normal View History" — commented
#  out so the script parses.)
# NOTE(review): numDeriv is loaded but never referenced below — differentiation
# is done symbolically with stats::D(); confirm whether this dependency is needed.
library(numDeriv)
# Exponent k: multiplicity of the roots at x = 0 (k-1) and x = 7 (k) in f below.
k <- 3
# 2022-06-15 18:51:01 +02:00 (commit timestamp from web scrape; not R code)
# Evaluate the polynomial f(x) = x^(k-1) * (x + 3) * (x - 4) * (x - 7)^k.
# Vectorized over `x`; `k` sets the multiplicity of the roots at 0 and 7.
f <- function(k, x) {
  simple_roots <- (x + 3) * (x - 4)
  x^(k - 1) * simple_roots * (x - 7)^k
}
# 2022-06-15 18:51:01 +02:00 (commit timestamp from web scrape; not R code)
# Symbolic form of f, kept as an unevaluated expression so that stats::D()
# can differentiate it with respect to x.  (Fixed: use `<-`, not `=`, for
# top-level assignment, per R style convention.)
function_formula <- expression(x^(k-1) * (x + 3) * (x - 4) * (x - 7)^k)
# 2022-06-15 18:51:01 +02:00 (commit timestamp from web scrape; not R code)
# Symbolic derivative of the formula with respect to x (via stats::D)
derivative_formula <- D(function_formula, 'x')
# Plotting grid covering all roots of f (-3, 0, 4, 7)
x <- seq(-3, 8.5, by=0.1)
y <- f(k,x)
# Derivative evaluated on the grid (eval picks up the global x and k)
g <- eval(derivative_formula)
# Initial point for the gradient-descent search
startPoint <- -2
# One-dimensional gradient descent on f starting from x0.
#
# Fixes vs. the original version:
#   * the variable named `gradient` was computed as eval(function_formula) —
#     i.e. the function VALUE, not its derivative — even though
#     derivative_formula is built above; it now evaluates derivative_formula;
#   * the update moved WITH the gradient (ascent) despite the name; it now
#     steps against it: x0 - alpha * gradient.
#
# Side effect: draws every visited point (green) on the currently open plot.
#
# Args:
#   x0      starting x value (defaults to the global startPoint)
#   epsilon stop when the squared change of f between steps drops below this
#   alpha   learning rate (step size)
#   i.max   maximum number of iterations
#   k       exponent, visible to f() and to the evaluated formulas
#
# Returns a list: [[1]] final x, [[2]] path of visited x values,
#                 [[3]] iterations used, [[4]] per-step loss values.
grad.descent <- function(x0 = startPoint,
                         epsilon = 0.0001,
                         alpha = 0.00001,
                         i.max = 1e6,
                         k = 3){
  x <- x0
  # Derivative at the starting point; eval() uses the local x and k.
  gradient <- eval(derivative_formula)
  x.path <- x0
  loss <- c()
  for (i in seq_len(i.max)){
    x.new <- x0 - alpha * gradient  # step downhill
    x <- x.new
    gradient <- eval(derivative_formula)  # derivative at the new point
    points(x = x.new, y = f(k,x.new), pch = 20, col = 'green', cex = 0.5)
    # Loss = squared change of the objective between consecutive iterates.
    currentLoss <- (f(k, x0) - f(k,x.new))^2
    print(currentLoss)
    loss <- append(loss, currentLoss )
    if (currentLoss < epsilon){  # converged: f barely changed
      break
    }
    x0 <- x.new
    x.path <- rbind(x.path, x.new)
  }
  return(list(x.new, x.path, i, loss))
}
# Draw f (black) and its derivative (yellow) over the grid, plus a zero line.
plot(x, y, type="l", ylim = c(-15000, 30000))
lines(x, g, col="yellow")
abline(h = 0, col="gray")
# 2022-06-15 18:51:01 +02:00 (commit timestamp from web scrape; not R code)
# Run the search (also draws the visited points onto the plot above).
result <- grad.descent(k = k)
# 2022-06-15 18:51:01 +02:00 (commit timestamp from web scrape; not R code)
# Function value at the point found by the search
round(f(k,result[[1]][1]), 3)
# The point that was found
round(result[[1]], 2)
# 2022-06-15 18:51:01 +02:00 (commit timestamp from web scrape; not R code)
# Mark the starting point (red) and the point found (blue) on the plot
points(x = startPoint, y = f(k,startPoint), pch = 20, col = 'red', cex = 2)
points(x = result[[1]], y = f(k,result[[1]]), pch = 20, col = 'blue', cex = 2)
# 2022-06-15 18:51:01 +02:00 (commit timestamp from web scrape; not R code)
# Convergence diagnostic: per-iteration loss (squared change of f)
plot(result[[4]], type="l")