* HeadPoseEstimationUsingANN: http://enginius.tistory.com/307
이번에는 LDA(Linear Discriminant Analysis)를 이용해서 같은 작업을 수행하였다.
* LDA: http://enginius.tistory.com/310
먼저 training data로 LDA를 수행하고, 구해진 W Matrix의 성능을 label과 비교해서 확인하였다. LDA의 특성인 Linear transform 때문에 ANN에 비해 학습 시간이 매우 짧다. 내 노트북에서 대략 5초면 끝난다. ANN은 i7 2600k에서 4시간 정도 학습 시켰었다. 하지만 성능이 떨어진다.
학습 결과
매트랩 소스
1. data를 읽어오는 매트랩 소스
%% Set Directory Path
%dirRootPath = 'C:/Users/CPSLab/Dropbox/연구/CPSLab/구현한 것들/QMULPoseHeads/QMULPoseHeads'; % PC in CPS Lab
dirRootPath = 'C:/Users/Human/Dropbox/연구/CPSLab/구현한 것들/QMULPoseHeads/QMULPoseHeads'; % my XNote laptop

%% Get training dataset into 'trainData' and 'trainTargets'
% Loads 50x50 JPEG head images from five category folders, flattens each to
% a 2500x1 grayscale column vector, and stacks them column-wise.  Targets
% are one-hot 5x1 column vectors, one class per folder (order below fixes
% the class index: front, back, left, right, non-face).
fprintf('1. Get training data from the folder\r\n');
trainFeatureCount = 50*50;   % each image is 50 by 50 pixels
trainDataCount = 2255;       % images per category
trainTargetCount = 5;        % front, back, left, right face and non-face
trainData = zeros(trainFeatureCount, trainDataCount*trainTargetCount);
trainTargets = zeros(trainTargetCount, trainDataCount*trainTargetCount);
% Category metadata: printed label and sub-folder per class index
catLabels = {'front face', 'back face', 'left face', 'right face', 'non face'};
trainDirs = {'Data_f', 'Data_b', 'Data_l', 'Data_r', 'Data_bg'};
for i = 1:trainTargetCount
    % Progress header (first category has no leading newline, as before)
    if i == 1
        fprintf('%s \t > ', catLabels{i});
    else
        fprintf('\r\n%s \t > ', catLabels{i});
    end
    dirPath = sprintf('%s/train/%s', dirRootPath, trainDirs{i});
    fileList = dir( sprintf('%s/*.jpg', dirPath) );
    fileNames = {fileList.name};
    % One-hot target for this category
    target = zeros(trainTargetCount, 1);
    target(i) = 1;
    k = 1;
    for j = 1:trainDataCount
        % 'img' avoids shadowing the builtin image() function
        img = imread( sprintf('%s/%s', dirPath, fileNames{j}) );
        Igray = rgb2gray(img);
        Igray = reshape(Igray, trainFeatureCount, 1);
        Igray = Igray - min(Igray); % per-image brightness shift (uint8 arithmetic)
        trainData( : , (i-1)*trainDataCount+j ) = Igray;
        trainTargets( : , (i-1)*trainDataCount+j ) = target;
        % print 10 progress numbers per category
        if rem(j, int16(trainDataCount/10)) == 0
            fprintf('%d ', k);
            k = k + 1;
        end
    end
end
fprintf('\r\n');
fprintf('Total %d training data loaded to trainData, trainTargets \r\n', trainDataCount*trainTargetCount);

%% Get testing dataset into 'testData' and 'testTargets'
% Same layout as the training set, read from the test/ folders.
fprintf('1. Get testing data from the folder\r\n');
testFeatureCount = 50*50;    % each image is 50 by 50 pixels
testDataCount = 1000;        % images per category
testTargetCount = 5;         % same five categories as training
testData = zeros(testFeatureCount, testDataCount*testTargetCount);
testTargets = zeros(testTargetCount, testDataCount*testTargetCount);
testDirs = {'Test_f', 'Test_b', 'Test_l', 'Test_r', 'Test_bg'};
for i = 1:testTargetCount
    if i == 1
        fprintf('%s \t > ', catLabels{i});
    else
        fprintf('\r\n%s \t > ', catLabels{i});
    end
    dirPath = sprintf('%s/test/%s', dirRootPath, testDirs{i});
    fileList = dir( sprintf('%s/*.jpg', dirPath) );
    fileNames = {fileList.name};
    target = zeros(testTargetCount, 1);
    target(i) = 1;
    k = 1;
    for j = 1:testDataCount
        img = imread( sprintf('%s/%s', dirPath, fileNames{j}) );
        Igray = rgb2gray(img);
        Igray = reshape(Igray, testFeatureCount, 1);
        Igray = Igray - min(Igray); % per-image brightness shift (uint8 arithmetic)
        testData( : , (i-1)*testDataCount+j ) = Igray;
        testTargets( : , (i-1)*testDataCount+j ) = target;
        % print 10 progress numbers per category
        if rem(j, int16(testDataCount/10)) == 0
            fprintf('%d ', k);
            k = k + 1;
        end
    end
end
fprintf('\r\n');
fprintf('Total %d test data loaded to testData, testTargets \r\n', testDataCount*testTargetCount);

%% Housekeeping: free the large per-image temporaries
clear fileList;
clear fileNames;
clear img;    % bug fix: original 'clear Image' never matched the lowercase variable
clear Igray;
clear catLabels trainDirs testDirs target;
2. LDA를 수행하고, test하는 매트랩 소스
%%
clc;
clear all;

%% First, load the head-pose data sets from disk
% getHeadPoseData fills trainData/trainTargets and testData/testTargets.
% Each column of the data matrices is one observation vector, and targets
% arrive one-hot per column: class 1 of 5 is [1 0 0 0 0]', class 2 is
% [0 1 0 0 0]', and so on.
getHeadPoseData;

%% Second, reshape everything into the layout LDA() expects
% LDA() wants observations in rows, so transpose all four matrices.
trainData = trainData';
trainTargets = trainTargets';
testData = testData';
testTargets = testTargets';
% Collapse each one-hot row into its integer class label:
% [1 0 0 0 0] -> 1, [0 1 0 0 0] -> 2, ...
trainTargets = trainTargets * (1:5)';
testTargets = testTargets * (1:5)';

%% Now fit the linear discriminant on the training set
W = LDA(trainData, trainTargets);

%% Check the discriminant against the training set itself
L = [ones(size(trainData, 1), 1) trainData] * W';   % discriminant scores
P = exp(L) ./ ( sum(exp(L), 2) * ones(1, 5) );      % softmax certainty per class
[maxVal, maxIndex] = max(P, [], 2);                 % predicted class and its certainty
LDA_Validate = (maxIndex == trainTargets);
LDA_Performance = mean(LDA_Validate);
fprintf('the performance of this LDA is %.1f with average certainty %.1f \r\n'...
, LDA_Performance*100, mean(maxVal)*100);

%% Repeat the evaluation on the held-out test set
L = [ones(size(testData, 1), 1) testData] * W';
P = exp(L) ./ ( sum(exp(L), 2) * ones(1, 5) );
[maxVal, maxIndex] = max(P, [], 2);
LDA_Validate = (maxIndex == testTargets);
LDA_Performance = mean(LDA_Validate);
fprintf('the test result of this LDA is %.1f with average certainty %.1f \r\n'...
, LDA_Performance*100, mean(maxVal)*100);
%%
3. LDA 함수 매트랩 소스
% LDA - MATLAB subroutine to perform linear discriminant analysis
% by Will Dwinnell and Deniz Sevis
%
% Use:
% W = LDA(Input,Target,Priors)
%
% W = discovered linear coefficients (first column is the constants)
% Input = predictor data (variables in columns, observations in rows)
% Target = target variable (class labels)
% Priors = vector of prior probabilities (optional)
%
% Note: discriminant coefficients are stored in W in the order of unique(Target)
function W = LDA(Input, Target, Priors)
% Gaussian linear discriminant analysis with a pooled covariance estimate.
% Returns one row of discriminant coefficients per class, in the order of
% unique(Target); column 1 holds the constant term, columns 2:end the
% linear part, so scores are [1 x] * W'.
%
%   Input  - n-by-m predictor matrix (observations in rows)
%   Target - n-by-1 vector of class labels
%   Priors - optional k-by-1 vector of prior probabilities; defaults to
%            the empirical class frequencies nGroup/n

% Determine size of input data
[n, m] = size(Input);

% Discover and count unique class labels (fixes the row order of W)
ClassLabel = unique(Target);
k = length(ClassLabel);

% Initialize
nGroup = NaN(k, 1);       % observations per class
GroupMean = NaN(k, m);    % class sample means
PooledCov = zeros(m, m);  % pooled within-class covariance
W = NaN(k, m+1);          % model coefficients

% Loop over classes to accumulate per-class statistics
for i = 1:k
    % Establish location and size of each class
    Group = (Target == ClassLabel(i));
    nGroup(i) = sum(double(Group));
    % Class mean vector
    GroupMean(i,:) = mean(Input(Group,:));
    % Accumulate pooled covariance, weighted by (n_i - 1)/(n - k)
    PooledCov = PooledCov + ((nGroup(i) - 1) / (n - k)) .* cov(Input(Group,:));
end

% Assign prior probabilities (the original also assigned PriorProb before
% this point; that early copy was dead code and has been removed)
if (nargin >= 3)
    % Use the user-supplied priors
    PriorProb = Priors;
else
    % Use the sample probabilities
    PriorProb = nGroup / n;
end

% Loop over classes to calculate linear discriminant coefficients
for i = 1:k
    % mu_i * inv(PooledCov) via mrdivide, avoiding an explicit inverse
    Temp = GroupMean(i,:) / PooledCov;
    % Constant term
    W(i,1) = -0.5 * Temp * GroupMean(i,:)' + log(PriorProb(i));
    % Linear terms
    W(i,2:end) = Temp;
end
end
% EOF
'Enginius > Machine Learning' 카테고리의 다른 글
[Deep Learning ]RBM trained with Contrastive Divergence (6) | 2012.06.15 |
---|---|
내가 만든 LDA in Matlab (3) | 2012.06.10 |
Linear Discriminant Analysis Matlab Example (0) | 2012.06.09 |
Random Forest Description (0) | 2012.06.07 |
ANN로 headPoseEstimate하기 (0) | 2012.06.05 |