Answer To: EECE5644_2020Spring_TakeHome4Questions.pdf EECE5644 Spring 2020 – Take Home Exam 4 Submit: Monday,...
Abr Writing answered on Apr 03 2021
EECE5644_2020Spring_TakeHome4Solution.m
%% Question 1
% Generate the training data (1000 samples): row 1 is the predictor x1,
% row 2 is the response y.
data = exam4q1_generateData(1000);
X_train = data(1,:);
y_train = data(2,:);
% Generate an independent test set (10000 samples).
data = exam4q1_generateData(10000);
X_test = data(1,:);
y_test = data(2,:);
% Single-hidden-layer MLP with 5 hidden nodes and ReLU ('poslin') units.
% (The original created feedforwardnet([10]) and then overwrote the layer
% size to 5 — declare the intended size once.)
net = feedforwardnet(5);
net.layers{1}.transferFcn = 'poslin';
net.trainFcn = 'trainlm';   % Levenberg-Marquardt training
net.performFcn = 'mse';     % mean squared error objective
% 10-fold cross-validation over the TRAINING set.
k = 10;
cv = cvpartition(length(X_train), 'KFold', k);
valMSE = zeros(1, k);
for i = 1:k
    % Fold-specific train/validation splits (the original computed these
    % indices but then trained on the full training set every fold).
    trainIdxs = training(cv, i);
    valIdxs = test(cv, i);
    X_fold = X_train(trainIdxs);
    y_fold = y_train(trainIdxs);
    X_val = X_train(valIdxs);
    y_val = y_train(valIdxs);
    % Re-initialize and train the network on this fold's training portion.
    net = configure(net, X_fold, y_fold);
    net = train(net, X_fold, y_fold);
    % Evaluate on the held-out fold.
    y_hat = net(X_val);
    valMSE(i) = mean((y_val - y_hat).^2);
    % Regression plot needs targets AND outputs (plotregression(net) is
    % not a valid call signature).
    plotregression(y_val, y_hat);
end
fprintf('Mean 10-fold cross-validation MSE: %.4f\n', mean(valMSE));
% Final performance on the independent test set.
y_test_hat = net(X_test);
fprintf('Test MSE: %.4f\n', mean((y_test - y_test_hat).^2));
%% Question 2
% Generate training (1000) and test (10000) samples from a 2-class
% multiring dataset.
[X_train, y_train] = generateMultiringDataset(2, 1000);
[X_test, y_test] = generateMultiringDataset(2, 10000);
% fitcsvm expects observations in rows; labels as a cell array of chars.
X_train = X_train';
X_test = X_test';
y_train = num2cell(num2str(y_train'));
y_test = num2cell(num2str(y_test'));
rng(1); % for reproducibility of the partition and the Bayesian search
% 10-fold cross-validation partition used by the hyperparameter search.
c = cvpartition(length(y_train), 'KFold', 10);
opts = struct('Optimizer', 'bayesopt', ...
    'ShowPlots', true, ...
    'CVPartition', c, ...
    'AcquisitionFunctionName', 'expected-improvement-plus');
% Train a Gaussian-kernel SVM, tuning BoxConstraint and KernelScale
% ('auto') by Bayesian optimization over the partition above.
SVMModel = fitcsvm(X_train, y_train, ...
    'Standardize', true, ...
    'ClassNames', {'1', '2'}, ...
    'KernelFunction', 'RBF', ...
    'OptimizeHyperparameters', 'auto', ...
    'HyperparameterOptimizationOptions', opts);
% Estimate generalization error of the tuned model with 10-fold CV.
CVSVMModel = crossval(SVMModel, 'KFold', 10);
cvError = kfoldLoss(CVSVMModel)
% Visualize decision scores and support vectors over a mesh of the
% training-data bounding box.
svInd = SVMModel.IsSupportVector;
h = 1; % mesh grid step size
[X1, X2] = meshgrid(min(X_train(:,1)):h:max(X_train(:,1)), ...
    min(X_train(:,2)):h:max(X_train(:,2)));
[~, score] = predict(SVMModel, [X1(:), X2(:)]);
scoreGrid = reshape(score(:,2), size(X1)); % X1 and X2 share one grid size
figure
plot(X_train(:,1), X_train(:,2), 'k.')
hold on
plot(X_train(svInd,1), X_train(svInd,2), 'ro', 'MarkerSize', 10)
contour(X1, X2, scoreGrid)
colorbar;
% Title corrected: this is two-class classification, not the outlier
% detection demo the original title was copied from.
title('{\bfSVM classification with RBF kernel}')
legend('Observation', 'Support Vector')
hold off
% Predict on the held-out test set and report the confusion matrix.
y_pred = predict(SVMModel, X_test);
y_pred = cell2mat(y_pred);
y_test = cell2mat(y_test);
confusionmat(y_test, y_pred)
%% Question 3
% Read the two images to be segmented.
airplane = imread('3096_color.jpg');
bird = imread('42049_color.jpg');
% Build a 5-dimensional feature row per pixel: [row, col, R, G, B].
% Two fixes vs. the original:
%   1) the linear index must use the number of COLUMNS (size(...,2)),
%      not rows, so each image row maps to a contiguous block of feature
%      rows on non-square images;
%   2) pixel values are cast to double BEFORE concatenation — mixing
%      doubles with uint8 makes the whole row uint8, saturating row/col
%      indices at 255 for large images.
nRows = size(airplane, 1);
nCols = size(airplane, 2);
airplane_temp = zeros(nRows*nCols, 5);
for i = 1:nRows
    for j = 1:nCols
        airplane_temp((i-1)*nCols + j, :) = [i, j, ...
            double(airplane(i, j, 1)), ...
            double(airplane(i, j, 2)), ...
            double(airplane(i, j, 3))];
    end
end
airplane = airplane_temp;
nRows = size(bird, 1);
nCols = size(bird, 2);
bird_temp = zeros(nRows*nCols, 5);
for i = 1:nRows
    for j = 1:nCols
        bird_temp((i-1)*nCols + j, :) = [i, j, ...
            double(bird(i, j, 1)), ...
            double(bird(i, j, 2)), ...
            double(bird(i, j, 3))];
    end
end
bird = bird_temp;
% Keep the raw (row, col) coordinates for plotting the cluster maps.
airplane_plot = airplane(:,1:2);
bird_plot = bird(:,1:2);
% Min-max normalize every feature column to [0, 1].
for col = 1:5
    airplane(:,col) = standardize(airplane(:,col));
    bird(:,col) = standardize(bird(:,col));
end
% Fit Gaussian mixture models with 1-3 components to each image and plot
% the resulting pixel-cluster assignments in image coordinates.
% (The original also fit unused 2-component models — removed.)
options = statset('MaxIter', 1000);
GMModels_airplane = cell(3, 1); % preallocation
for j = 1:3
    GMModels_airplane{j} = fitgmdist(airplane, j, 'Options', options, 'RegularizationValue', 0.1);
    fprintf('\n GM Mean for %i Component(s)\n', j)
    Mu = GMModels_airplane{j}.mu
end
figure
for j = 1:3
    idx = cluster(GMModels_airplane{j}, airplane);
    subplot(2,2,j)
    gscatter(airplane_plot(:,1), airplane_plot(:,2), idx)
    title(sprintf('GM Model - %i Component(s)', j));
    xlabel('');
    ylabel('');
    legend off;
    hold off
end
GMModels_bird = cell(3, 1); % preallocation
for j = 1:3
    GMModels_bird{j} = fitgmdist(bird, j, 'Options', options, 'RegularizationValue', 0.1);
    fprintf('\n GM Mean for %i Component(s)\n', j)
    Mu = GMModels_bird{j}.mu
end
figure
for j = 1:3
    idx = cluster(GMModels_bird{j}, bird);
    subplot(2,2,j)
    gscatter(bird_plot(:,1), bird_plot(:,2), idx)
    title(sprintf('GM Model - %i Component(s)', j));
    xlabel('');
    ylabel('');
    legend off;
    hold off
end
standardize.m
function X_std = standardize(X)
% STANDARDIZE Min-max scale the columns of X to the range [0, 1].
%   X_std = STANDARDIZE(X) maps each column's minimum to 0 and its
%   maximum to 1. Despite the name, this is min-max normalization,
%   not z-scoring.
%   Fixes vs. original: element-wise './' instead of matrix division
%   '/' (which is mrdivide and wrong for matrix input), and a guard so
%   a constant column maps to zeros instead of NaN.
rangeX = max(X) - min(X);
rangeX(rangeX == 0) = 1; % avoid 0/0 for constant columns
X_std = (X - min(X)) ./ rangeX;
end
exam4q1_generateData.m
function x = exam4q1_generateData(N)
close all,
m(:,1) = [-9;-4]; Sigma(:,:,1) = 4*[1,0.8;0.8,1]; % mean and covariance of data pdf conditioned on label 3
m(:,2) = [0;0]; Sigma(:,:,2) = 3*[3,0;0,0.3]; % mean and covariance of data pdf conditioned on label 2
m(:,3) = [8;-3]; Sigma(:,:,3) =...