
Fmincon and loop for updating objective function

Dat Tran on 16 Feb 2016
Commented: Dat Tran on 17 Feb 2016
Dear all,
How can I write a loop for the code below so that, at every step, the objective function is updated in such a way that pr = p, i.e. the prior is set to the solution from the previous step?
I would appreciate any thoughts and help!
Dat
function [p,fval] = MC_NT_try2(p0,Aeq,beq,N,opts)
if nargin < 5
opts = optimoptions('fmincon','Algorithm','interior-point','GradObj','on','DerivativeCheck','on');
end
M=length(p0);
p=nan(M,N);
fval=nan(1,N);
lb=zeros(64,1);
ub=ones(64,1);
for ii=1:N
[p(:,ii),fval(ii)] = fmincon(@fun,p0,[],[],Aeq,beq,lb,ub,[],opts);
end
function [f, gradf]= fun(p)
%%impose prior information
if ii==1
pr1=[0.3185 0.0001 0.1574 0.2902 0.0003 0.00001 0.8426 0.7098 0.6804 0.0822 0.00001 0.00001 0.0008 0.9177 0.00001 0.00001];
pr2=0.25*ones(64,1);
else
pr1=p(:,ii);
pr2=p(:,ii);
end
%%objective function
f1 = 0;
for i = 1:16
f1 = f1 + p(i)*log(p(i))-p(i)*log(pr1(i));
end
f2=0;
for j = 17:64
f2 = f2 + p(j)*log(p(j))-p(j)*log(pr2(j));
end
f=f1+f2;
%%gradient of objective function
if nargout > 1
gradf=[log(p(1))+1-log(pr1(1));
log(p(2))+1-log(pr1(2));
log(p(3))+1-log(pr1(3));
log(p(4))+1-log(pr1(4));
log(p(5))+1-log(pr1(5));
log(p(6))+1-log(pr1(6));
log(p(7))+1-log(pr1(7));
log(p(8))+1-log(pr1(8));
log(p(9))+1-log(pr1(9));
log(p(10))+1-log(pr1(10));
log(p(11))+1-log(pr1(11));
log(p(12))+1-log(pr1(12));
log(p(13))+1-log(pr1(13));
log(p(14))+1-log(pr1(14));
log(p(15))+1-log(pr1(15));
log(p(16))+1-log(pr1(16));
log(p(17))+1-log(pr2(17));
log(p(18))+1-log(pr2(18));
log(p(19))+1-log(pr2(19));
log(p(20))+1-log(pr2(20));
log(p(21))+1-log(pr2(21));
log(p(22))+1-log(pr2(22));
log(p(23))+1-log(pr2(23));
log(p(24))+1-log(pr2(24));
log(p(25))+1-log(pr2(25));
log(p(26))+1-log(pr2(26));
log(p(27))+1-log(pr2(27));
log(p(28))+1-log(pr2(28));
log(p(29))+1-log(pr2(29));
log(p(30))+1-log(pr2(30));
log(p(31))+1-log(pr2(31));
log(p(32))+1-log(pr2(32));
log(p(33))+1-log(pr2(33));
log(p(34))+1-log(pr2(34));
log(p(35))+1-log(pr2(35));
log(p(36))+1-log(pr2(36));
log(p(37))+1-log(pr2(37));
log(p(38))+1-log(pr2(38));
log(p(39))+1-log(pr2(39));
log(p(40))+1-log(pr2(40));
log(p(41))+1-log(pr2(41));
log(p(42))+1-log(pr2(42));
log(p(43))+1-log(pr2(43));
log(p(44))+1-log(pr2(44));
log(p(45))+1-log(pr2(45));
log(p(46))+1-log(pr2(46));
log(p(47))+1-log(pr2(47));
log(p(48))+1-log(pr2(48));
log(p(49))+1-log(pr2(49));
log(p(50))+1-log(pr2(50));
log(p(51))+1-log(pr2(51));
log(p(52))+1-log(pr2(52));
log(p(53))+1-log(pr2(53));
log(p(54))+1-log(pr2(54));
log(p(55))+1-log(pr2(55));
log(p(56))+1-log(pr2(56));
log(p(57))+1-log(pr2(57));
log(p(58))+1-log(pr2(58));
log(p(59))+1-log(pr2(59));
log(p(60))+1-log(pr2(60));
log(p(61))+1-log(pr2(61));
log(p(62))+1-log(pr2(62));
log(p(63))+1-log(pr2(63));
log(p(64))+1-log(pr2(64))];
end
end
end
1 comment
Walter Roberson on 16 Feb 2016
gradf = log(p(:)) + 1 - log([reshape(pr1(1:16),[],1); reshape(pr2(17:64),[],1)]);
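For what it's worth, this one-liner reproduces the 64 hand-written gradient entries. A quick sanity check along these lines (a sketch with arbitrary positive test vectors, not the actual problem data) compares the loop form from the question against the vectorized form:
p_test = rand(64,1);
pr1 = rand(64,1);
pr2 = rand(64,1);
g_loop = zeros(64,1);
for k = 1:16
    g_loop(k) = log(p_test(k)) + 1 - log(pr1(k));   % same formula as the question's first block
end
for k = 17:64
    g_loop(k) = log(p_test(k)) + 1 - log(pr2(k));   % same formula as the question's second block
end
g_vec = log(p_test(:)) + 1 - log([reshape(pr1(1:16),[],1); reshape(pr2(17:64),[],1)]);
max(abs(g_loop - g_vec))   % should be 0 up to rounding error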


Accepted Answer

Walter Roberson on 16 Feb 2016
function [p, fval] = MC_NT_try2(p0, Aeq, beq, N, opts)
if nargin < 5
opts = optimoptions('fmincon', 'Algorithm', 'interior-point', 'GradObj', 'on', 'DerivativeCheck', 'on');
end
M = length(p0);
p = nan(M,N);
fval = nan(1,N);
lb = zeros(64,1);
ub = ones(64,1);
pr = [0.3185, 0.0001, 0.1574, 0.2902, 0.0003, 0.00001, 0.8426, 0.7098, 0.6804, 0.0822, 0.00001, 0.00001, 0.0008, 0.9177, 0.00001, 0.00001, 0.25 * ones(1, 64-16)];
p0 = p0(:);
pr = pr(:);
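% note: the handle @(p) fun(p, pr) below captures the current value of pr,
% so each pass optimizes against the prior returned by the previous pass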
for ii=1:N
[pr, fval(ii)] = fmincon(@(p) fun(p, pr), p0, [], [], Aeq, beq, lb, ub, [], opts);
p(:,ii) = pr;
end
end
function [f, gradf] = fun(p, pr)
%%objective function
f = sum( p .* log(p) - p .* log(pr) );
%%gradient of objective function
if nargout > 1
gradf = log(p) + 1 - log(pr);
end
end
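A minimal way to call this might look like the sketch below. The sum-to-one equality constraint, the uniform starting point, and the number of passes are assumptions for illustration only, since the original Aeq, beq and p0 are not shown in the thread:
% hypothetical setup: a 64-element probability vector that must sum to 1
Aeq = ones(1, 64);
beq = 1;
p0  = ones(64, 1) / 64;   % uniform, strictly positive starting point
N   = 5;                  % number of prior-updating passes
[p, fval] = MC_NT_try2(p0, Aeq, beq, N);
plot(fval, '-o'), xlabel('pass'), ylabel('objective value')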

More Answers (0)
