clean-files

Jakub Adamski 2022-06-15 18:51:01 +02:00
parent cc07821b22
commit 110d1eeb06
5 changed files with 159 additions and 89 deletions

2
.gitignore vendored

@@ -2,3 +2,5 @@
.Rhistory
.RData
.Ruserdata
.DS_Store

BIN
Projekt_2/loss.png Normal file (new binary image, 22 KiB; not shown)

BIN
Projekt_2/minimum.png Normal file (new binary image, 29 KiB; not shown)

143
Projekt_2/projekt-test.R Normal file

@@ -0,0 +1,143 @@
library(numDeriv)   # loaded here; the gradient below is coded analytically

# Analytic derivative (gradient) of f
# @x - point (or vector of points) at which the derivative is evaluated
grad <- function(x){
  return( (x - 7)^2 * x * (7*x^3 - 34*x^2 - 39*x + 168) )
}

startPoint <- -2

# Function to minimize: f(x) = x^2 * (x + 3) * (x - 4) * (x - 7)^3
f <- function(x){
  return( x^2 * (x + 3) * (x - 4) * (x - 7)^3 )
}
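# Optional sanity check (illustrative only): the analytic derivative above can be
# compared against a numerical gradient from numDeriv, e.g.
#   numDeriv::grad(f, startPoint)   # numerical derivative of f at the start point
#   grad(startPoint)                # analytic derivative; the two should agree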
# Gradient descent: minimizes f
# @x0      - starting point
# @epsilon - maximum error (stopping threshold)
# @alpha   - learning rate
# @i.max   - maximum number of iterations
grad.descent <- function(x0 = startPoint,
                         epsilon = 0.01,
                         alpha = 0.00001,
                         i.max = 1e6){
  gradient <- grad(x0)               # initialize gradient
  x.path <- x0
  loss <- c()
  for (i in 1:i.max){
    x.new <- x0 - alpha * gradient   # descent step
    gradient <- grad(x.new)          # gradient at the new point
    points(x = x.new, y = f(x.new), pch = 20, col = 'green', cex = 0.5)
    currentLoss <- (f(x0) - f(x.new))^2
    print(currentLoss)
    loss <- append(loss, currentLoss)
    if (currentLoss < epsilon){      # STOP condition
      break
    }
    x0 <- x.new
    x.path <- rbind(x.path, x.new)
  }
  return(list(x.new, x.path, i, loss))   # found point, path, iterations, loss history
}
x <- seq(-3, 8.5, by = 0.1)
y <- f(x)
g <- grad(x)

plot(x, y, type = "l", ylim = c(-15000, 30000))
lines(x, g, col = "yellow")      # gradient curve
abline(h = 0, col = "gray")

result <- grad.descent()
round(f(result[[1]][1]), 3)      # value of the function at the found point
round(result[[1]], 2)            # the point that was found
points(x = startPoint, y = f(startPoint), pch = 20, col = 'red', cex = 2)     # starting point
points(x = result[[1]], y = f(result[[1]]), pch = 20, col = 'blue', cex = 2)  # found minimum
plot(result[[4]], type = "l")    # loss history
# addendum ----------------------------------------------------------------
# ----- full, generalized version: f parameterised by an exponent k

# Degree parameter; reading it interactively did not convert to an integer
# for some reason, so it is set directly here:
#k <- as.integer(readline(prompt = ""))
k <- 3

# Function values: f(x) = x^(k-1) * (x + 3) * (x - 4) * (x - 7)^k
f <- function(k, x){
  return( x^(k-1) * (x + 3) * (x - 4) * (x - 7)^k )
}

# The function as a symbolic formula
function_formula <- expression(x^(k-1) * (x + 3) * (x - 4) * (x - 7)^k)
# Its derivative, obtained symbolically with D()
derivative_formula <- D(function_formula, 'x')

x <- seq(-3, 8.5, by = 0.1)
y <- f(k, x)
g <- eval(derivative_formula)

startPoint <- -2
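# Note (illustrative only): D() returns an unevaluated expression, so the
# derivative is computed by eval() using the values of x and k currently in scope, e.g.
#   x <- startPoint; eval(derivative_formula)   # derivative of f at the start point for this k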
# Gradient descent using the symbolically derived gradient
# @x0      - starting point
# @epsilon - maximum error (stopping threshold)
# @alpha   - learning rate
# @i.max   - maximum number of iterations
# @k       - exponent parameter of f
grad.descent <- function(x0 = startPoint,
                         epsilon = 0.0001,
                         alpha = 0.00001,
                         i.max = 1e6,
                         k = 3){
  x <- x0
  gradient <- eval(derivative_formula)   # initialize gradient from the symbolic derivative
  x.path <- x0
  loss <- c()
  for (i in 1:i.max){
    x.new <- x0 - alpha * gradient       # descent step
    x <- x.new
    gradient <- eval(derivative_formula) # gradient at the new point
    points(x = x.new, y = f(k, x.new), pch = 20, col = 'green', cex = 0.5)
    currentLoss <- (f(k, x0) - f(k, x.new))^2
    print(currentLoss)
    loss <- append(loss, currentLoss)
    if (currentLoss < epsilon){          # STOP condition
      break
    }
    x0 <- x.new
    x.path <- rbind(x.path, x.new)
  }
  return(list(x.new, x.path, i, loss))   # found point, path, iterations, loss history
}
plot(x, y, type = "l", ylim = c(-15000, 30000))
lines(x, g, col = "yellow")      # derivative curve
abline(h = 0, col = "gray")

result <- grad.descent(k = k)
round(f(k, result[[1]][1]), 3)   # value of the function at the found point
round(result[[1]], 2)            # the point that was found
points(x = startPoint, y = f(k, startPoint), pch = 20, col = 'red', cex = 2)     # starting point
points(x = result[[1]], y = f(k, result[[1]]), pch = 20, col = 'blue', cex = 2)  # found minimum
plot(result[[4]], type = "l")    # loss history

View File

@@ -1,117 +1,38 @@
(Removed from this file: the gradient-descent section now added as Projekt_2/projekt-test.R above; the code was identical.)
# addendum ----------------------------------------------------------------
# ----- full, generalized version: f parameterised by an exponent k

# Degree parameter; reading it interactively did not convert to an integer
# for some reason, so it is set directly here:
#k <- as.integer(readline(prompt = ""))
k <- 3

# Function values: f(x) = x^(k-1) * (x + 3) * (x - 4) * (x - 7)^k
f <- function(k, x){
  return( x^(k-1) * (x + 3) * (x - 4) * (x - 7)^k )
}

# The function as a symbolic formula
function_formula <- expression(x^(k-1) * (x + 3) * (x - 4) * (x - 7)^k)
# Its derivative, obtained symbolically with D()
derivative_formula <- D(function_formula, 'x')

x <- seq(-3, 8.5, by = 0.1)
y <- f(k, x)
g <- eval(derivative_formula)

startPoint <- -2

# Gradient descent using the symbolically derived gradient
# @x0      - starting point
# @epsilon - maximum error (stopping threshold)
# @alpha   - learning rate
# @i.max   - maximum number of iterations
# @k       - exponent parameter of f
grad.descent <- function(x0 = startPoint,
                         epsilon = 0.0001,
                         alpha = 0.00001,
                         i.max = 1e6,
                         k = 3){
  x <- x0
  gradient <- eval(derivative_formula)   # initialize gradient from the symbolic derivative
  x.path <- x0
  loss <- c()
  for (i in 1:i.max){
    x.new <- x0 - alpha * gradient       # descent step
    x <- x.new
    gradient <- eval(derivative_formula) # gradient at the new point
    points(x = x.new, y = f(k, x.new), pch = 20, col = 'green', cex = 0.5)
    currentLoss <- (f(k, x0) - f(k, x.new))^2
    print(currentLoss)
@@ -130,14 +51,18 @@ plot(x, y, type="l", ylim = c(-15000, 30000))
lines(x, g, col = "yellow")      # derivative curve
abline(h = 0, col = "gray")

result <- grad.descent(k = k)
# value of the function at the found point
round(f(k, result[[1]][1]), 3)
# the point that was found
round(result[[1]], 2)
# starting point
points(x = startPoint, y = f(k, startPoint), pch = 20, col = 'red', cex = 2)
points(x = result[[1]], y = f(k, result[[1]]), pch = 20, col = 'blue', cex = 2)
# loss history
plot(result[[4]], type = "l")