Neural NetworksA Comprehensive Foundation 神经网络综合基础 源代码.docx
- 文档编号:27936289
- 上传时间:2023-07-06
- 格式:DOCX
- 页数:51
- 大小:30.08KB
Neural NetworksA Comprehensive Foundation 神经网络综合基础 源代码.docx
《Neural NetworksA Comprehensive Foundation 神经网络综合基础 源代码.docx》由会员分享,可在线阅读,更多相关《Neural NetworksA Comprehensive Foundation 神经网络综合基础 源代码.docx(51页珍藏版)》请在冰豆网上搜索。
Neural Networks: A Comprehensive Foundation — source code
readme
--------------------
Notes on routines
--------------------
These M-files are User Contributed Routines which are being redistributed
by The MathWorks, upon request, on an "as is" basis. A User Contributed
Routine is not a product of The MathWorks, Inc. and The MathWorks assumes
no responsibility for any errors that may exist in these routines.
These files were created under Matlab 5.1 and use no specific toolboxes.
Examples of running the routines are given below.
Some of the routines have been incorporated into "demo" programs. The
demo programs are simple scripts that call the associated M-files.
All routines written by Hugh Pasika except for the SVM, which was originally
composed by Antonio Artes (that's why the variables are all in Spanish) and
smoothed a bit by Hugh Pasika, and the ICA m-file, which was written by
Himesh Madhuranath.
pasika@soma.mcmaster.ca
--------------------------------------------
BackPropagation
(section4.8)
--------------------------------------------
% 1. make the data
P = mk_data(500);
% 2. start the backprop algorithm
[W1,b1,W2,b2,ep_err,a,end_ep] = bpm_train(P,4,2,2,.1,.5,500,0,0,0,0,0);
% 3. check the decision boundary
bpm_dec_bnds(W1,b1,W2,b2,.1);
% 4. make a test set
T = mk_data(10000);
% 5. check the accuracy
[cor,uncor] = bpm_test(W1,b1,W2,b2,T);
% 6. plot Bayesian decision boundary
% NOTE(review): the extraction gave "[-2/30]"; a 2-element centre
% "[-2/3 0]" is the likely original -- confirm against pl_circ.m.
c = pl_circ([-2/3 0],2.34,.01,1);
--------------------------------------------
Radial Basis Functions
(section5.14)
--------------------------------------------
% 1. Make data set
P = mk_data(200);
% 2. Train the RBF
% (commas inside the index expressions were eaten by the extraction;
%  "P(:,1:2)" etc. restore standard row/column indexing)
w = rbf(P(1:100,1:2),P(:,1:2),P(1:100,3:4),4,1);
% 3. make a test set
T = mk_data(500);
% 4. get network outputs with test set
rbfout = rbf_test(w,T(:,1:2),P(:,1:2),4);
% 5. determine percent correct
rbf_correct(rbfout,T(:,5));
% 6. plot decision boundary
rbf_db(w,P(:,1:2),4,.2)
--------------------------------------------
Support Vector Machine
(section6.4)
--------------------------------------------
% 1. make data set
P = mk_data(200);
% 2. run the SVM routine
[pesos,vect,b] = svm_rbf(P,8,1000,.01,.1);
% 3. make a test set
T = mk_data(200);
% 4. test data
% NOTE(review): extraction gave "[cu]"; "[c u]" (correct/incorrect
% counts) is the likely original -- confirm against svm_test.m.
[c u] = svm_test(T,pesos,vect,b,8);
% 5. plot decision surface
svm_dec_bnd(pesos,vect,b,8)
-------------------------------------------------
Self Organizing Map (2d data, 2d map)
(section9.6)
-------------------------------------------------
% 1. run the demo
som_2d_demo
% plotting between iterations has been taken out but can easily be added
% by uncommenting line 95 in som_2d.m: "som_pl_map(W,1,2); drawnow"
-------------------------------------------------
Self Organizing Map (2d data, 1d map)
(section9.6)
-------------------------------------------------
% 1. make the data
P = rand(1000,2);
% 2. ordering phase
% NOTE(review): the extraction gave "[W1sp1]" and "[.118]"; the output
% list must be "[W1s p1]" (W1s and p1 are used below), but the exact
% split of ".118" into parameters is a guess -- confirm against som_1d.m.
[W1s p1] = som_1d(P,200,10,[.1 18]);
% 3. convergence phase
[W2s p2] = som_1d(P,200,50,[p1(1) .001 p1(2) 0],W1s);
--------------------------------------------
Generalized Hebbian Algorithm
(section8.6)
--------------------------------------------
% 1. load the image data
load gha_data
% 2. set the colormap
colormap(gray(256))
% 3. run the algorithm (8 masks)
W = gha_getweights(parn,2000,8,.0001);
% 4. determine mask coefficients
% display masks and reconstructed image with unquantized coeffs
c = gha_getcoeffs(parn,W,1);
% 5. quantize the coefficients according to user specified bit rate
% and recompose the image
% (extraction gave "[77643322]"; one bit rate per each of the 8 masks)
[I,st,xla] = gha_quantcoeffs(c,W,parn,[7 7 6 4 3 3 2 2]);
% 6. display the reconstructed image
subplot(2,2,4)
pim(I)
--------------------------------------------
Independent Component Analysis
(section10.12)
--------------------------------------------
This example runs as a standalone script.
% 1. ica
--------------------------------------------
Brain State in a Box
(section14.11)
--------------------------------------------
% 1. run the routine
% (extraction gave "[-.3-.7]", which would collapse to the scalar -1;
%  a 2-element starting state is intended)
c = bsb([-.3 -.7],.9);
% 2. subsequent plots will be held
c = bsb([-.1 -.7],.9);
--------------------------------------------
Hopfield Network
(section14.8)
--------------------------------------------
% 1. run the demo
hop_demo
bpm_dec_bnds
function A = bpm_dec_bnds(W1,b1,W2,b2,st_sz)
%function A = bpm_dec_bnds(W1,b1,W2,b2,st_sz)
%
% Determines the decision boundaries using the weights calculated by
% the bpm_train routine.
% The region of interest is hard coded (x=-6:st_sz:6; y=-4:st_sz:4;).
% st_sz is the resolution of the grid over which testing is performed.
%
% Hugh Pasika 1997

x = -6:st_sz:6;
y = -4:st_sz:4;

% evaluate the two-layer MLP at every grid point
for i = 1:length(x),
   for j = 1:length(y),
      input = [x(i) y(j)]';
      outHiddenLayer = bpm_phi(W1*input + b1);
      outOutputLayer = bpm_phi(W2*outHiddenLayer + b2);
      output = outOutputLayer;
      % class decision: -1 where the first output wins, +1 otherwise
      if output(1) > output(2)
         A(j,i) = -1;
      else
         A(j,i) = 1;
      end
   end
end

contour(x,y,A,1);
% extraction gave "[-66]"; axis limits [-6 6] match the hard-coded grid
set(gca,'XLim',[-6 6]);
grid
title('Decision Boundary')
bpm_phi
function b = phi(a)
% PHI  Logistic sigmoid activation: b = 1./(1+exp(-a)), elementwise.
% Stored as bpm_phi.m, so callers invoke it as bpm_phi(a).
b = 1./(1 + exp(-a));
bpm_phi_d
function b = phi_d(a)
% PHI_D  Derivative of the logistic sigmoid, elementwise:
% b = exp(-a)./(1+exp(-a)).^2, i.e. phi(a).*(1-phi(a)).
% Stored as bpm_phi_d.m, so callers invoke it as bpm_phi_d(a).
b = (exp(-a))./((1 + exp(-a)).^2);
bpm_test
function [cor,uncor,O,dec] = bpm_test(W1,b1,W2,b2,P)
%[cor,uncor,O,dec] = bpm_test(W1,b1,W2,b2,P)
%
% cor  - percent correct
% uncor- percent incorrect
% O    - raw outputs
% dec  - class decision
%
% Hugh Pasika 1997

[pats cP] = size(P);
for i = 1:pats,
   input  = P(i,1:2)';
   target = P(i,2+1:2+2)';
   outHiddenLayer = bpm_phi(W1*input + b1);
   outOutputLayer = bpm_phi(W2*outHiddenLayer + b2);
   O(i,1:2) = outOutputLayer';
end
% NOTE(review): the comparison operator below was eaten by the HTML
% extraction ("dec=(O(: 1) 2))"); O(:,1) < O(:,2) is consistent with
% column 5 of P holding a 0/1 class label -- confirm.
dec = (O(:,1) < O(:,2));
n_uncor = (sum(abs(dec - P(:,5))));
uncor = (sum(abs(dec - P(:,5)))/pats)*100;
cor = ((pats - n_uncor)/pats)*100;
fprintf(1,'\npercent correct: %g   percent incorrect: %g\n\n',cor,uncor)
bpm_train
function [W1,b1,W2,b2,ep_err,initAvErr,end_epoch]=bpm_train(P,hN,oN,inD,lr,mom,epochs,W1i,b1i,W2i,b2i,stop_crit)
%function [W1,b1,W2,b2,ep_err,initAvErr,end_epoch]=
%          bpm_train(P,hN,oN,inD,lr,mom,epochs,W1i,b1i,W2i,b2i,stop_crit)
%
% W1,W2,b1,b2 - weights, bias
% ep_err      - average error over epoch
% initAvErr   - initial average error (before any training)
% end_epoch   - if stop criteria is specified, this is the epoch the
%               algorithm quit at
%
% P      - row wise training vectors with format
%          [input1 input2 ... target1 target2 ..]
% hN     - number of hidden neurons
% oN     - number of output neurons
% inD    - input dimension
% lr     - learning rate
% mom    - momentum
% epochs - number of epochs to train for
% W1i,b1i,W2i,b2i - initial weights and biases - if set to zero
%          routine initializes with random variables
% stop_crit - percentage error change between epochs used as stopping
%          criteria - set to zero if routine is to run for 'epochs' epochs
%
% This function performs bp learning with momentum on a single hidden
% layer MLP. It returns the weights and biases as matrices giving the
% MSE after each epoch. The patterns should be row vectors. The
% non-linear function is the standard sigmoidal and is found in files
% bpm_phi.m and bpm_phi_d.m (the derivative).
%
% NOTE(review): W1i,b1i,W2i,b2i are accepted but never used in the body
% visible here -- weights are always random-initialized via rands().
%
% [W1,b1,W2,b2,ep_err,a,end_ep]=bpm_train(P,4,2,2,.1,.5,5,0,0,0,0,0);
%
% Hugh Pasika June 1997

pep_err=10;        % previous-epoch error seed; first percent-change is meaningless
end_epoch=epochs;  % FIX: define end_epoch even when the stop criterion never fires
% FIX: original used "break" outside a loop; "return" is the legal equivalent
if (nargin~=12), fprintf(1,'Wrong number of input arguments.  Exiting!\n\n'); return; end
fprintf(1,'\nThe first time through, the percent change in average error is meaningless.\n\n')

%--------------------------------------------------------
% init network
%--------------------------------------------------------
dflag=0;
[num_pats cP]=size(P);
W1=rands(hN,inD+1);  W2=rands(oN,hN+1);
dW1l=W1*0;  dW2l=W2*0;
lr1=lr*ones(size(W1));  lr2=lr*ones(size(W2));

% just for reference sake, determine the average error with the random weights
for i=1:num_pats,
   input=[P(i,1:inD) 1]';  target=P(i,inD+1:inD+oN)';
   outHidden=bpm_phi(W1*input);  outOutput=bpm_phi(W2*[outHidden' 1]');
   outputError=target-outOutput;  errEpoch(i)=sqrt(sumsqr(outputError));
end
initAvErr=sum(errEpoch)/num_pats;
fprintf('The initial mean squared error is: %g\n\n',initAvErr)

%--------------------------------------------------------
% entering training loop
%--------------------------------------------------------
fprintf(1,'Training via Backprop with Momentum\n\n')
epoch=1;
dflag=0;
% NOTE(review): the loop condition was eaten by the extraction
% ("whileepoch"); epoch<=epochs & dflag==0 matches the surrounding
% logic (dflag is set by the stop criterion) -- confirm.
while epoch<=epochs & dflag==0
   cc=clock;
   fprintf(1,'entering training loop for epoch %g of %g epochs\n',epoch,epochs);
   P=shuffle(P);
   for i=1:num_pats,
      input=[P(i,1:inD) 1]';
      target=P(i,inD+1:inD+oN)';
      sumHidden=W1*input;            outHidden=bpm_phi(sumHidden);
      sumOutput=W2*[outHidden' 1]';  outOutput=bpm_phi(sumOutput);
      outputError=target-bpm_phi(sumOutput);  errEpoch(i)=sumsqr(outputError);
      % backprop deltas with momentum
      dc=bpm_phi_d(sumOutput).*outputError;
      dW2=lr2.*dc(:,ones(hN+1,1)).*[outHidden(:,ones(oN,1))' ones(oN,1)];
      db=bpm_phi_d(sumHidden).*(sum((W2(1:oN,1:hN)'.*dc(:,ones(1,hN))'),2));
      dW1=lr1.*db(:,ones(inD+1,1)).*(input(:,ones(hN,1))');
      W1=W1+dW1+mom*dW1l;  W2=W2+dW2+mom*dW2l;
      dW1l=dW1;  dW2l=dW2;
   end
   ep_err(epoch)=sum(errEpoch)/num_pats;
   fprintf(1,'mean square error: %g\n',ep_err(epoch))
   fprintf(1,'seconds to train epoch: %g\n',etime(clock,cc))
   diff_ep_err=100*(pep_err-ep_err(epoch))/pep_err;
   fprintf(1,'Percent change in average error for epoch is: %g\n\n',diff_ep_err)
   % NOTE(review): this comparison was also lost in extraction; stopping
   % when the change drops below stop_crit matches the header comment
   % (never fires when stop_crit==0) -- confirm.
   if abs(diff_ep_err) < stop_crit
      fprintf(1,'Training ended due to delta error term being exceeded on epoch: %g\n\n',epoch)
      dflag=1;
      end_epoch=epoch;
   end
   pep_err=ep_err(epoch);  % save epoch_err for calculating epoch error on next iter
   epoch=epoch+1;
end

% split the augmented weight matrices into weights and biases
b1=W1(:,inD+1);  b2=W2(:,hN+1);  W1=W1(:,1:inD);  W2=W2(:,1:hN);
bsb
functionc=bsb(x,beta,multi
- 配套讲稿:
如PPT文件的首页显示word图标,表示该PPT已包含配套word讲稿。双击word图标可打开word文档。
- 特殊限制:
部分文档作品中含有的国旗、国徽等图片,仅作为作品整体效果示例展示,禁止商用。设计者仅对作品中独创性部分享有著作权。
- 关 键 词:
- Neural Networks Comprehensive Foundation 神经网络综合基础 源代码 神经网络 综合 基础
链接地址:https://www.bdocx.com/doc/27936289.html