@@ -96,7 +96,7 @@ function objective_duality(x, auxdata)
     cost[res.Qjkn .== 0] .= 0 # Deal with cases Qjkn=kappa=0

     # Compute constraint
-    cons = res.cjn .* graph.Lj .+ dropdims(sum(res.Qjkn .+ cost .- permutedims(res.Qjkn, (2, 1, 3)), dims=2), dims=2) .- res.Yjn
+    cons = res.cjn .* graph.Lj + dropdims(sum(res.Qjkn + cost - permutedims(res.Qjkn, (2, 1, 3)), dims=2), dims=2) - res.Yjn
     cons = sum(Pjn .* cons, dims=2)

     # Compute objective value
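The change in this hunk only drops the broadcast dots on `+` and `-` in the constraint. A minimal sketch, assuming the three terms (`res.cjn .* graph.Lj`, the `dropdims(sum(...))` net-flow term, and `res.Yjn`) are all same-sized J-by-N arrays as the surrounding code suggests, showing that plain array arithmetic and element-wise broadcasting then coincide:

    # Hypothetical shapes for illustration only (J = 3 nodes, N = 2 goods).
    A = rand(3, 2)  # stands in for res.cjn .* graph.Lj
    B = rand(3, 2)  # stands in for the dropdims(sum(...), dims=2) term
    C = rand(3, 2)  # stands in for res.Yjn
    # For equal-sized arrays, A + B - C and A .+ B .- C perform the same
    # element-wise operations, so the dots can be dropped without changing results.
    @assert (A + B - C) == (A .+ B .- C)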
@@ -117,7 +117,7 @@ function gradient_duality(x::Vector{Float64}, grad_f::Vector{Float64}, auxdata)
     cost[res.Qjkn .== 0] .= 0 # Deal with cases Qjkn=kappa=0

     # Compute constraint
-    cons = res.cjn .* graph.Lj .+ dropdims(sum(res.Qjkn .+ cost .- permutedims(res.Qjkn, (2, 1, 3)), dims=2), dims=2) .- res.Yjn
+    cons = res.cjn .* graph.Lj + dropdims(sum(res.Qjkn + cost - permutedims(res.Qjkn, (2, 1, 3)), dims=2), dims=2) - res.Yjn

     grad_f .= -cons[:] # Flatten the array and store in grad_f
     return
@@ -186,7 +186,7 @@ function hessian_duality(

     termC = part1 .* repeat(graph.Lj ./ (graph.omegaj .* param.usecond.(res.cj, graph.hj)), param.N, graph.J * param.N)

-    diff = Lambda' - Lambda
+    diff = Lambda' - Lambda # P^n_k - P^n_j
     mat_kappa = repeat(kappa, param.N, param.N)
     part1 = 1 / (param.beta * (1 + param.beta)^(1 / param.beta)) * Inm .* mat_kappa .^ (1 / param.beta)
     abs_diff_1betam1 = abs.(diff) .^ (1 / param.beta - 1)
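The only change here is the added comment documenting that `diff` holds pairwise price differences. A short sketch, under the assumption (not visible in this hunk) that `Lambda` is a flattened price vector repeated across columns, showing why `Lambda' - Lambda` then gives P^n_k - P^n_j:

    # Hypothetical flattened prices; the real Lambda is built from Pjn upstream.
    p = [1.0, 3.0, 7.0]
    Lambda = repeat(p, 1, length(p))   # Lambda[j, k] == p[j]
    diff = Lambda' - Lambda            # diff[j, k] == p[k] - p[j]
    @assert diff[1, 3] == p[3] - p[1]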