program gradient_descent_1d
! 1-D gradient descent on f(x) = 3x^2 + 2x + 1, starting from x0 = 1.5.
! Iterates x <- x - alpha * f'(x) until the change in the objective value
! drops below eps, or the iteration budget nb_max_iter is exhausted.
! Each iteration prints: current x, f(x), and the stopping measure.

use, intrinsic :: iso_fortran_env, only: output_unit

implicit none

integer, parameter :: pr = selected_real_kind(15,3)

real(pr) :: x0, y0, tmp_y
integer :: nb_iter, nb_max_iter
real(pr) :: alpha, eps, cond

alpha = 0.1_pr        ! learning rate
nb_max_iter = 100     ! maximum number of iterations
eps = 0.0001_pr       ! stop condition: threshold on |f(x_k) - f(x_{k-1})|

x0 = 1.5_pr           ! start point
y0 = f(x0)

cond = eps + 10.0_pr  ! start with cond greater than eps so the loop runs
nb_iter = 0
tmp_y = y0
do while (cond > eps .and. nb_iter < nb_max_iter)
	x0 = x0 - alpha * f_prime(x0)
	y0 = f(x0)
	nb_iter = nb_iter + 1
	cond = abs(tmp_y - y0)   ! progress measured as change in objective value
	tmp_y = y0
	write(output_unit,*) x0, y0, cond
end do

contains

	!> Objective function: f(x) = 3x^2 + 2x + 1.
	real(pr) function f(x)
		real(pr), intent(in) :: x
		f = 3.0_pr*x**2 + 2.0_pr*x + 1.0_pr
	end function f

	!> Forward-difference approximation of f'(x) with a fixed step h.
	real(pr) function f_prime(x)
		real(pr), intent(in) :: x
		real(pr), parameter :: h = 0.001_pr  ! finite-difference step
		f_prime = ( f(x+h) - f(x) ) / h
	end function f_prime

end program gradient_descent_1d

!> Objective function for the gradient-descent demo: f(x) = 3x^2 + 2x + 1.
!> The result kind is kept at kind=16 to match the caller's external
!> declaration of f (NOTE(review): kind=16 is compiler-dependent; the
!> program's working kind is pr = selected_real_kind(15,3)).
real(kind=16) function f(x)
implicit none
integer, parameter :: pr = selected_real_kind(15,3)
real(pr), intent(in) :: x
! Integer exponent x**2 instead of x**2.0 (no real-power evaluation), and
! _pr suffixes keep the arithmetic in the working precision instead of
! default real before conversion to the result kind.
f = 3.0_pr*x**2 + 2.0_pr*x + 1.0_pr
end function f

!> Forward-difference approximation of f'(x) with a fixed step h = 1e-3.
!> First-order accurate: for this quadratic the estimate is biased by
!> exactly 3h. The result kind is kept at kind=16 to match the caller's
!> external declaration of f_prime.
real(kind=16) function f_prime(x)
implicit none
integer, parameter :: pr = selected_real_kind(15,3)
real(pr), intent(in) :: x
real(kind=16) :: f                   ! external objective function (this file)
real(pr), parameter :: h = 0.001_pr  ! finite-difference step, in working kind
f_prime = ( f(x+h) - f(x) ) / h
end function f_prime

! About / Au sujet de
!
! Created:
! 5 avril 2017 21:46:26
!
! Updated:
! 5 avril 2017 21:46:26
!
! License / Licence
!
! MIT License
!
! Abstract / Résumé