# BFGS_test_table.jl — 157 lines (131 loc) · 4.14 KB
using CUTEst
using Distributions
using LinearAlgebra
using Printf
using Random
include("noiseFunctions.jl")
include("objectiveFunctions.jl")
# Initialize objective function from a CUTEst test problem.
# Alternative problem (larger, banded): uncomment to use SBRYBND with N=10.
#nlp = CUTEstModel("SBRYBND", "-param", "N=10")
nlp = CUTEstModel("ROSENBR")
# Problem dimension, taken from the length of the SIF starting point.
d = length(nlp.meta.x0)
# Noise-free oracle wrapper (func_eval/grad_eval) — used for true-progress tracking.
objFunTrue = nlp_objective(nlp)
# Set minimum objective value according to SIF file for CUTEst problem above
# (ROSENBR has optimal value 0; used below to form the optimality gap).
obj_fun_min_val = 0
# Set gradient noise level (absolute).
# Relative alternative: scale by the gradient norm at the starting point.
#epsilon_g = (1e-4)*norm(objFunTrue.grad_eval(nlp.meta.x0))
epsilon_g = 1e-4
# Set function noise level — currently disabled (zero function noise below).
#epsilon_f = (1e-4)*abs(objFunTrue.func_eval(nlp.meta.x0))
#epsilon_f = 1e-4
# Set noise type: noiseless function values, gradient perturbed uniformly in
# a d-dimensional ball of radius epsilon_g (helpers from noiseFunctions.jl).
objFun = nlp_objective_additive_noise(nlp,zero_noise(),ball_uniform_random_noise(d, epsilon_g))
# Variant with symmetric uniform function noise as well:
#objFun = nlp_objective_additive_noise(nlp,symmetric_uniform_random_noise(epsilon_f),ball_uniform_random_noise(d, epsilon_g))
# Aggregate parameters: number of independent BFGS runs and the per-run
# budget of (noisy) objective evaluations.
total_runs = 30
max_obj_fun_evals = 2000
# Per-run result storage (indexed by run number r).
best_true_obj_vals = zeros(total_runs)
curv_fail_counts = zeros(total_runs)
line_search_fail_counts = zeros(total_runs)
iter_counts = zeros(total_runs)
# Repeated algorithm runs: each run restarts BFGS from the CUTEst starting
# point with fresh gradient noise and records per-run statistics into the
# aggregate arrays (best_true_obj_vals, *_fail_counts, iter_counts).
for r in 1:total_runs
    global max_obj_fun_evals, iter_counts, best_true_obj_vals
    # Optimization setup
    x_0 = nlp.meta.x0   # CUTEst starting point
    H_0 = I             # initial inverse-Hessian approximation (identity)
    # Backtracking (Armijo) line search parameters; 2*noise_tol relaxes the
    # sufficient-decrease test to tolerate function noise (here zero).
    alpha_0 = 1
    c_1 = 1e-4
    tau = 0.5
    noise_tol = 0
    max_backtracks = 45
    # Termination parameters (eps = 0: run until the evaluation budget).
    termination_eps = 0
    # Per-run counters and best-value tracker
    curv_fail_count = 0
    line_search_fail_count = 0
    obj_fun_eval_count = 0
    best_true_obj_val = Inf
    iter_count = 0
    # BFGS loop state
    x = x_0
    H = H_0
    f_old = Inf
    f_old_true = Inf
    g_old = zeros(d)
    while obj_fun_eval_count < max_obj_fun_evals
        iter_count += 1
        alpha = alpha_0
        # Initial noisy function and gradient evaluations (first iteration only)
        if obj_fun_eval_count == 0
            f_old = objFun.func_eval(x)
            f_old_true = objFunTrue.func_eval(x)
            g_old = objFun.grad_eval(x)
            obj_fun_eval_count += 1
        end
        x_old = x
        p = -H*g_old            # quasi-Newton search direction
        x = x_old + alpha*p
        f_new = objFun.func_eval(x)
        f_new_true = objFunTrue.func_eval(x)
        g_new = objFun.grad_eval(x)
        obj_fun_eval_count += 1
        # Backtracking line search on the noise-relaxed Armijo condition.
        line_search_fail = 0
        while f_new > f_old + c_1*alpha*dot(g_old,p) + 2*noise_tol && obj_fun_eval_count < max_obj_fun_evals
            line_search_fail += 1
            if line_search_fail > max_backtracks
                alpha = 0       # give up: fall back to the current iterate
            else
                alpha = tau*alpha
            end
            x = x_old + alpha*p
            f_new = objFun.func_eval(x)
            f_new_true = objFunTrue.func_eval(x)
            g_new = objFun.grad_eval(x)
            obj_fun_eval_count += 1
        end
        line_search_fail_count += line_search_fail
        # Calculate secant condition parameters
        s_k = x - x_old
        y_k = g_new - g_old
        y_k_dot_s_k = dot(s_k,y_k)
        # BFGS inverse-Hessian update; skipped (H kept) when the curvature
        # condition s'y > 0 fails, which noise can cause.
        if y_k_dot_s_k > 0
            rho_k = 1/y_k_dot_s_k
            H = (I - rho_k*s_k*y_k')*H*(I - rho_k*y_k*s_k') + rho_k*s_k*s_k'
        else
            @printf("Curvature condition failed!\n")
            curv_fail_count += 1
        end
        @show(line_search_fail)
        @show(obj_fun_eval_count,f_old,f_new)
        @show norm(objFunTrue.grad_eval(x))
        # Track the best TRUE objective seen so far. Bug fix: this update now
        # precedes the termination test, so a terminating iteration's values
        # are not dropped (originally it was only reached after `break`).
        best_true_obj_val = min(f_old_true,f_new_true,best_true_obj_val)
        if norm(g_new) <= termination_eps
            # Bug fix: original printed undefined variable `i` here.
            @printf("Terminated within tolerance after %d iterations\n", iter_count)
            break
        end
        f_old = f_new
        f_old_true = f_new_true
        g_old = g_new
        @show r
    end
    @show(curv_fail_count)
    @show(line_search_fail_count)
    @show(norm(H))
    best_true_obj_vals[r] = best_true_obj_val
    curv_fail_counts[r] = curv_fail_count
    line_search_fail_counts[r] = line_search_fail_count
    iter_counts[r] = iter_count
end
# Print table statistics: summary of log10 optimality gaps and iteration
# counts across all runs. The (identical) gap vector is computed once and
# reused — the original rebuilt it in all five print statements; `log10.`
# is the direct base-10 broadcast, numerically identical to `log.(10, x)`.
log10_gaps = log10.(best_true_obj_vals .- obj_fun_min_val)
@printf("====Table Values====\n")
@printf("BFGS Mean Log10 Optimality Gap: %1.1E\n", mean(log10_gaps))
@printf("BFGS Median Log10 Optimality Gap: %1.1E\n", median(log10_gaps))
@printf("BFGS Minimum Log10 Optimality Gap: %1.1E\n", minimum(log10_gaps))
@printf("BFGS Maximum Log10 Optimality Gap: %1.1E\n", maximum(log10_gaps))
@printf("BFGS Log10 Optimality Gap Variance: %1.1E\n", var(log10_gaps))
@printf("BFGS Mean Number of Iterations: %d\n", mean(iter_counts))