% exercise2.m
addpath(genpath('feature-extraction'));
addpath(genpath('liblinear-1.94'));
%% STEP 1
% Compile the feature-extraction toolbox
% - cd to the feature-extraction folder
% - type "compile" (warnings will be printed; ignore them)
%% STEP 2
% Compile liblinear (only needed on non-Windows machines)
% - cd to liblinear-1.94/matlab
% - in the Makefile, edit the line "MATLABDIR ?= /usr/local/MATLAB/R2012b"
%   to point to your MATLAB installation
% - the correct path can be obtained by typing "matlabroot" in Matlab
% - go to the same folder in a terminal and type "make" (again, ignore warnings)
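% As a convenience, the line below prints the exact Makefile entry for
% this machine (matlabroot is a MATLAB built-in; the Makefile itself
% still has to be edited by hand):
fprintf('MATLABDIR ?= %s\n', matlabroot);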
%% GO TO STEP 3 BELOW. NO NEED TO MODIFY THIS SECTION.
% Initialize variables for calling the datasets_feature function
info = load('filelists.mat');
datasets = {'tutorial'};
train_lists = {info.trainfiles};
test_lists = {info.testfiles};
feature = 'hog2x2';
% Load the configuration and set dictionary size to 20 (for fast demo)
c = conf();
c.verbosity = 1;
c.feature_config.(feature).dictionary_size = 20;
% Some default parameters for faster learning (do not modify)
c.feature_config.(feature).num_desc = 2e4;
c.feature_config.(feature).descPerImage = 200;
% Compute train and test features
datasets_feature(datasets, train_lists, test_lists, feature, c);
% Load train and test features
train_features = load_feature(datasets{1}, feature, 'train', c);
test_features = load_feature(datasets{1}, feature, 'test', c);
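% Optional sanity check: print the feature matrix sizes. This assumes the
% toolbox returns one feature row per image, matching how the matrices
% are passed to train/predict below.
fprintf('train features: %d x %d, test features: %d x %d\n', ...
    size(train_features, 1), size(train_features, 2), ...
    size(test_features, 1), size(test_features, 2));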
%% STEP 3: Play with different C parameters and see how the results change
% In a real scenario you would pick the best-performing C parameter on a
% validation set, and only then evaluate on the test set for the final
% performance; a sketch of such a search follows.
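% A minimal sketch of such a search using liblinear's built-in k-fold
% cross-validation: with the -v flag, train() returns the cross-validation
% accuracy instead of a model. The candidate grid here is an illustrative
% assumption, not part of the original exercise.
for C = [0.01 0.1 1 10 100]
    cv_acc = train(info.trainlabels', sparse(double(train_features)), ...
        ['-s 2 -B 1 -q -v 5 -c ' num2str(C)]);
    fprintf('C = %g -> 5-fold CV accuracy = %.2f%%\n', C, cv_acc);
end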
% SVM parameter setup
% (-s 2: L2-regularized L2-loss SVC (primal), -B 1: add a bias term,
%  -c: the cost parameter C, -q: quiet mode)
C_param = 0.1;
svm_options = ['-s 2 -B 1 -c ' num2str(C_param) ' -q'];
% SVM training with extracted training features
model = train(info.trainlabels', sparse(double(train_features)), svm_options);
% SVM testing with the trained model on the test set
[predicted_labels, ~, predicted_conf] = predict(info.testlabels', sparse(double(test_features)), model);
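% predict() already reports accuracy; this explicit cross-check recomputes
% it from the outputs (assumes testlabels is a row vector, matching the
% transpose above):
fprintf('Test accuracy: %.2f%%\n', ...
    100 * mean(predicted_labels == info.testlabels'));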
% Display the 30 most confident test predictions in a grid of subplots
[~, b] = sort(predicted_conf, 'descend');
ha = tight_subplot(5, 6, [0.01 0.01], [0.05 0.05], [0.05 0.05]);
for i = 1:30
    axes(ha(i));
    im = imread(test_lists{1}{b(i)});
    image(im);
    axis off;
end