P=train_data(:,2:size(train_data,2))'; % extract the attribute columns and transpose
clear train_data; % Release raw training data array
%%%%%%%%%%% Load testing dataset
test_data=load(TestingData_File);
TV.T=test_data(:,1)';
TV.P=test_data(:,2:size(test_data,2))';
clear test_data; % Release raw testing data array
NumberofTrainingData=size(P,2); % size of the training set
NumberofTestingData=size(TV.P,2); % size of the testing set
NumberofInputNeurons=size(P,1); % number of input neurons, i.e. the number of attributes
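% A minimal sketch of the data layout implied by the slicing above (the
% values are illustrative, not from the source): each row of the data file
% is one sample, column 1 is the target/label, columns 2:end are the
% attributes. For example, a 3-attribute sample could be the row
%   0.72  1.0  0.5  0.3   % target 0.72, attributes [1.0 0.5 0.3]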
% If this is not regression, i.e. it is a classification problem
if Elm_Type~=REGRESSION
%%%%%%%%%%%% Preprocessing the data of classification
sorted_target=sort(cat(2,T,TV.T),2); % concatenate training and testing labels into one row vector, sorted in ascending order
label=zeros(1,1); % Find and save in 'label' the class labels from the training and testing data sets
label(1,1)=sorted_target(1,1);
j=1;
for i = 2:(NumberofTrainingData+NumberofTestingData) % loop to collect the distinct labels: the first class goes to label(1,1), the second to label(1,2), etc.; sorted_target is already in ascending order
if sorted_target(1,i) ~= label(1,j)
j=j+1;
label(1,j) = sorted_target(1,i);
end
end
number_class=j; % number of classes
NumberofOutputNeurons=number_class; % the number of classes becomes the number of output neurons
%%%%%%%%%% Processing the targets of training
temp_T=zeros(NumberofOutputNeurons, NumberofTrainingData); % one column per training sample; temporary storage for the training targets
for i = 1:NumberofTrainingData % encode each training sample's label into temp_T; e.g. with 5 classes, if the first sample belongs to class 2, the first column of temp_T is [0;1;0;0;0]
for j = 1:number_class
if label(1,j) == T(1,i)
break;
end
end
temp_T(j,i)=1;
end
T=temp_T*2-1; % remap each entry of temp_T; e.g. for binary classification the values become -1 or 1; T is now a (number of labels) x (number of training samples) matrix
%%%%%%%%%% Processing the targets of testing (same method as for the training labels)
temp_TV_T=zeros(NumberofOutputNeurons, NumberofTestingData);
for i = 1:NumberofTestingData
for j = 1:number_class
if label(1,j) == TV.T(1,i)
break;
end
end
temp_TV_T(j,i)=1;
end
TV.T=temp_TV_T*2-1;
end % end if of Elm_Type
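% A minimal worked example of the encoding above (values are illustrative,
% not from the source): suppose the distinct labels found are 0, 1, 2, so
% label = [0 1 2], and the training targets are T = [1 0 2 1]. Encoding
% column by column, one row per class, gives
%   temp_T = [0 1 0 0;    % class 0
%             1 0 0 1;    % class 1
%             0 0 1 0];   % class 2
%   T = temp_T*2-1;       % -> [-1 1 -1 -1; 1 -1 -1 1; -1 -1 1 -1]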
%%%%%%%%%%% Calculate weights & bias
start_time_train=cputime; % record the training start time; training begins
%%%%%%%%%%% Random generate input weights InputWeight (w_i) and bias BiasofHiddenNeurons (b_i) of hidden neurons
InputWeight=rand(NumberofHiddenNeurons,NumberofInputNeurons)*2-1; % a (hidden neurons) x (input neurons) matrix; InputWeight(l,n) is the weight between input n and hidden neuron l; NumberofHiddenNeurons is set by the main function, NumberofInputNeurons is the number of attributes
BiasofHiddenNeurons=rand(NumberofHiddenNeurons,1); % column vector with one bias per hidden neuron; NumberofHiddenNeurons is set by the main function
tempH=InputWeight*P; % tempH is a (hidden neurons) x (training samples) matrix
clear P; % Release input of training data
ind=ones(1,NumberofTrainingData); % row vector of ones
BiasMatrix=BiasofHiddenNeurons(:,ind); % Extend the bias matrix BiasofHiddenNeurons to match the dimension of H,
% i.e. expand it from (hidden neurons) x 1 to (hidden neurons) x (training samples), the same size as tempH
tempH=tempH+BiasMatrix; % applying an activation function to tempH gives the hidden-layer output
%%%%%%%%%%% Calculate hidden neuron output matrix H (计算输出矩阵H)
switch lower(ActivationFunction) % ActivationFunction is specified by the user in the main function
ca {'sig','sigmoid'}
%%%%%%%% Sigmoid
H = 1 ./ (1 + exp(-tempH)); % H is the hidden-layer output, a (hidden neurons) x (training samples) matrix
ca {'sin','sine'}
%%%%%%%% Sine
H = sin(tempH);
ca {'hardlim'}
%%%%%%%% Hard Limit
H = double(hardlim(tempH));
ca {'tribas'}
%%%%%%%% Triangular basis function
H = tribas(tempH);
ca {'radbas'}
%%%%%%%% Radial basis function
H = radbas(tempH);
%%%%%%%% More activation functions can be added here
end
clear tempH; % Release the temporary array used to calculate the hidden neuron output matrix H
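% Dimension check for the block above (a sketch; L, n and N are shorthand
% not used elsewhere in this code): with L hidden neurons, n input
% attributes and N training samples,
%   InputWeight : L x n,  P : n x N,  tempH = InputWeight*P : L x N,
%   BiasMatrix  : L x N (the bias column replicated N times),
%   H           : L x N (activation applied element-wise).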
%%%%%%%%%%% Calculate output weights OutputWeight (beta_i)
%%%%%%%%%%% Compute the output weights beta (size: (hidden neurons) x (number of labels)); beta(l,m) is the weight between hidden neuron l and output neuron m
OutputWeight=pinv(H') * T'; % implementation without regularization factor //refer to 2006 Neurocomputing paper
% H is a (hidden neurons) x (training samples) matrix; pinv(H') is the generalized (Moore-Penrose) inverse of H', also of size (hidden neurons) x (training samples)
% The label matrix T became (number of labels) x (number of training samples) during label processing above, so OutputWeight (the beta in the notes) is (hidden neurons) x (number of labels)
%OutputWeight=inv(eye(size(H,1))/C+H * H') * H * T'; % faster method 1 //refer to 2012 IEEE TSMC-B paper
%implementation; one can set the regularization factor C properly in classification applications
%OutputWeight=(eye(size(H,1))/C+H * H') \ H * T'; % faster method 2 //refer to 2012 IEEE TSMC-B paper
%implementation; one can set the regularization factor C properly in classification applications
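% As a sketch of what the pinv line computes: OutputWeight is the
% minimum-norm least-squares solution of H'*beta = T', i.e. the beta
% minimizing ||H'*beta - T'||. A hypothetical equivalent using MATLAB's
% backslash, which solves the same least-squares problem but is not
% guaranteed minimum-norm when H' is rank-deficient:
%   OutputWeight = H' \ T';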
%If you use faster methods or the kernel method, PLEASE CITE in your paper properly:
%Guang-Bin Huang, Hongming Zhou, Xiaojian Ding, and Rui Zhang, "Extreme Learning Machine for Regression and Multi-Class Classification," submitted to IEEE Transactions on Pattern Analysis and Machine Intelligence, October 2010.
end_time_train=cputime; % training finished; record the end time
TrainingTime=end_time_train-start_time_train; % Calculate CPU time (seconds) spent training the ELM
%%%%%%%%%%% Calculate the training accuracy
Y=(H' * OutputWeight)'; % Y: the actual output of the training data. H is a (hidden neurons) x (training samples) matrix;
% OutputWeight is (hidden neurons) x (number of labels), so Y is (number of labels) x (training samples), the same size as the true label matrix
if Elm_Type == REGRESSION
TrainingAccuracy=sqrt(mse(T - Y)); % Calculate training accuracy (RMSE) for the regression case
end
clear H;
%%%%%%%%%%% Calculate the output of testing input
start_time_test=cputime; % record the testing start time
tempH_test=InputWeight*TV.P;
clear TV.P; % Release input of testing data
ind=ones(1,NumberofTestingData);
BiasMatrix=BiasofHiddenNeurons(:,ind); % Extend the bias matrix BiasofHiddenNeurons to match the dimension of H
tempH_test=tempH_test + BiasMatrix;
switch lower(ActivationFunction)
ca {'sig','sigmoid'}
%%%%%%%% Sigmoid
H_test = 1 ./ (1 + exp(-tempH_test));
ca {'sin','sine'}
%%%%%%%% Sine
H_test = sin(tempH_test);
ca {'hardlim'}
%%%%%%%% Hard Limit
H_test = hardlim(tempH_test);
ca {'tribas'}
%%%%%%%% Triangular basis function
H_test = tribas(tempH_test);
ca {'radbas'}
%%%%%%%% Radial basis function
H_test = radbas(tempH_test);
%%%%%%%% More activation functions can be added here
end
TY=(H_test' * OutputWeight)'; % TY: the actual output of the testing data; OutputWeight was learned from the training set
end_time_test=cputime; % record the testing end time
TestingTime=end_time_test-start_time_test; % Calculate CPU time (seconds) spent by the ELM predicting the whole testing set
if Elm_Type == REGRESSION
TestingAccuracy=sqrt(mse(TV.T - TY)); % Calculate testing accuracy (RMSE) for the regression case
end
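% Usage sketch, assuming this code lives in the standard elm.m with the
% signature elm(TrainingData_File, TestingData_File, Elm_Type,
% NumberofHiddenNeurons, ActivationFunction), where Elm_Type 0 = regression
% and 1 = classification; the data file names below are hypothetical:
%   [TrainingTime, TestingTime, TrainingAccuracy, TestingAccuracy] = ...
%       elm('diabetes_train', 'diabetes_test', 1, 20, 'sig');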