
Commit b6e50f6

Author: Merouane Atig (committed)
Import ex7
1 parent c7d329e commit b6e50f6

21 files changed: +1462 −0 lines

ex7.pdf (668 KB)

Binary file not shown.

mlclass-ex7/bird_small.png (32.3 KB)

Binary file not shown.

mlclass-ex7/computeCentroids.m

Lines changed: 40 additions & 0 deletions
@@ -0,0 +1,40 @@
function centroids = computeCentroids(X, idx, K)
%COMPUTECENTROIDS Returns the new centroids by computing the means of the
%data points assigned to each centroid.
%   centroids = COMPUTECENTROIDS(X, idx, K) returns the new centroids by
%   computing the means of the data points assigned to each centroid. It is
%   given a dataset X where each row is a single data point, a vector
%   idx of centroid assignments (i.e. each entry is in the range [1..K]) for
%   each example, and K, the number of centroids. You should return a matrix
%   centroids, where each row of centroids is the mean of the data points
%   assigned to it.
%

% Useful variables
[m, n] = size(X);

% You need to return the following variables correctly.
centroids = zeros(K, n);


% ====================== YOUR CODE HERE ======================
% Instructions: Go over every centroid and compute the mean of all points
%               that belong to it. Concretely, the row vector centroids(i, :)
%               should contain the mean of the data points assigned to
%               centroid i.
%
% Note: You can use a for-loop over the centroids to compute this.
%



% =============================================================


end
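The function body above is left for the student to fill in. For orientation only, a minimal sketch of one possible completion is shown here; it is not part of the commit, it follows the loop-over-centroids hint, and the "assigned" mask variable is an illustrative choice:

% Sketch of one possible completion (illustrative, not part of the commit)
function centroids = computeCentroids(X, idx, K)
    [m, n] = size(X);
    centroids = zeros(K, n);
    for i = 1:K
        assigned = (idx == i);                          % examples currently assigned to centroid i
        if any(assigned)
            centroids(i, :) = mean(X(assigned, :), 1);  % column-wise mean of those examples
        end
    end
end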

mlclass-ex7/displayData.m

Lines changed: 59 additions & 0 deletions
@@ -0,0 +1,59 @@
function [h, display_array] = displayData(X, example_width)
%DISPLAYDATA Display 2D data in a nice grid
%   [h, display_array] = DISPLAYDATA(X, example_width) displays 2D data
%   stored in X in a nice grid. It returns the figure handle h and the
%   displayed array if requested.

% Set example_width automatically if not passed in
if ~exist('example_width', 'var') || isempty(example_width)
    example_width = round(sqrt(size(X, 2)));
end

% Gray Image
colormap(gray);

% Compute rows, cols
[m, n] = size(X);
example_height = (n / example_width);

% Compute number of items to display
display_rows = floor(sqrt(m));
display_cols = ceil(m / display_rows);

% Between images padding
pad = 1;

% Setup blank display
display_array = - ones(pad + display_rows * (example_height + pad), ...
                       pad + display_cols * (example_width + pad));

% Copy each example into a patch on the display array
curr_ex = 1;
for j = 1:display_rows
    for i = 1:display_cols
        if curr_ex > m
            break;
        end
        % Copy the patch

        % Get the max value of the patch
        max_val = max(abs(X(curr_ex, :)));
        display_array(pad + (j - 1) * (example_height + pad) + (1:example_height), ...
                      pad + (i - 1) * (example_width + pad) + (1:example_width)) = ...
                      reshape(X(curr_ex, :), example_height, example_width) / max_val;
        curr_ex = curr_ex + 1;
    end
    if curr_ex > m
        break;
    end
end

% Display Image
h = imagesc(display_array, [-1 1]);

% Do not show axis
axis image off

drawnow;

end
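displayData is not called by ex7.m below; it is typically used with matrices whose rows are flattened square images (for example, the face images in the PCA part of the exercise). A minimal usage sketch, assuming X holds such rows:

% Sketch: show the first 100 examples, assuming each row of X is a flattened square image
displayData(X(1:100, :));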

mlclass-ex7/drawLine.m

Lines changed: 8 additions & 0 deletions
@@ -0,0 +1,8 @@
function drawLine(p1, p2, varargin)
%DRAWLINE Draws a line from point p1 to point p2
%   DRAWLINE(p1, p2) draws a line from point p1 to point p2 and holds the
%   current figure

plot([p1(1) p2(1)], [p1(2) p2(2)], varargin{:});

end
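A quick usage sketch (the endpoints and line style here are arbitrary; any extra arguments are forwarded to plot):

% Sketch: draw a dashed black segment between two 2-D points
drawLine([0 0], [2 3], '--k');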

mlclass-ex7/ex7.m

Lines changed: 170 additions & 0 deletions
@@ -0,0 +1,170 @@
%% Machine Learning Online Class
%  Exercise 7 | Principal Component Analysis and K-Means Clustering
%
%  Instructions
%  ------------
%
%  This file contains code that helps you get started on the
%  exercise. You will need to complete the following functions:
%
%     pca.m
%     projectData.m
%     recoverData.m
%     computeCentroids.m
%     findClosestCentroids.m
%     kMeansInitCentroids.m
%
%  For this exercise, you will not need to change any code in this file,
%  or any other files other than those mentioned above.
%

%% Initialization
clear ; close all; clc

%% ================= Part 1: Find Closest Centroids ====================
%  To help you implement K-Means, we have divided the learning algorithm
%  into two functions -- findClosestCentroids and computeCentroids. In this
%  part, you should complete the code in the findClosestCentroids function.
%
fprintf('Finding closest centroids.\n\n');

% Load an example dataset that we will be using
load('ex7data2.mat');

% Select an initial set of centroids
K = 3; % 3 Centroids
initial_centroids = [3 3; 6 2; 8 5];

% Find the closest centroids for the examples using the
% initial_centroids
idx = findClosestCentroids(X, initial_centroids);

fprintf('Closest centroids for the first 3 examples: \n')
fprintf(' %d', idx(1:3));
fprintf('\n(the closest centroids should be 1, 3, 2 respectively)\n');

fprintf('Program paused. Press enter to continue.\n');
pause;

%% ===================== Part 2: Compute Means =========================
%  After implementing the closest centroids function, you should now
%  complete the computeCentroids function.
%
fprintf('\nComputing centroid means.\n\n');

% Compute means based on the closest centroids found in the previous part.
centroids = computeCentroids(X, idx, K);

fprintf('Centroids computed after initial finding of closest centroids: \n')
fprintf(' %f %f \n' , centroids');
fprintf('\n(the centroids should be\n');
fprintf('   [ 2.428301 3.157924 ]\n');
fprintf('   [ 5.813503 2.633656 ]\n');
fprintf('   [ 7.119387 3.616684 ]\n\n');

fprintf('Program paused. Press enter to continue.\n');
pause;


%% =================== Part 3: K-Means Clustering ======================
%  After you have completed the two functions computeCentroids and
%  findClosestCentroids, you have all the necessary pieces to run the
%  K-Means algorithm. In this part, you will run the K-Means algorithm on
%  the example dataset we have provided.
%
fprintf('\nRunning K-Means clustering on example dataset.\n\n');

% Load an example dataset
load('ex7data2.mat');

% Settings for running K-Means
K = 3;
max_iters = 10;

% For consistency, here we set the centroids to specific values,
% but in practice you want to generate them automatically, such as by
% setting them to be random examples (as can be seen in
% kMeansInitCentroids).
initial_centroids = [3 3; 6 2; 8 5];

% Run the K-Means algorithm. The 'true' at the end tells our function to
% plot the progress of K-Means.
[centroids, idx] = runkMeans(X, initial_centroids, max_iters, true);
fprintf('\nK-Means Done.\n\n');

fprintf('Program paused. Press enter to continue.\n');
pause;

%% ============= Part 4: K-Means Clustering on Pixels ===============
%  In this exercise, you will use K-Means to compress an image. To do this,
%  you will first run K-Means on the colors of the pixels in the image and
%  then you will map each pixel onto its closest centroid.
%
%  You should now complete the code in kMeansInitCentroids.m
%

fprintf('\nRunning K-Means clustering on pixels from an image.\n\n');

% Load an image of a bird
A = double(imread('bird_small.png'));
A = A / 255; % Divide by 255 so that all values are in the range 0 - 1

% Size of the image
img_size = size(A);

% Reshape the image into an Nx3 matrix where N = number of pixels.
% Each row will contain the Red, Green and Blue pixel values.
% This gives us our dataset matrix X that we will use K-Means on.
X = reshape(A, img_size(1) * img_size(2), 3);

% Run your K-Means algorithm on this data.
% You should try different values of K and max_iters here.
K = 16;
max_iters = 10;

% When using K-Means, it is important to initialize the centroids
% randomly.
% You should complete the code in kMeansInitCentroids.m before proceeding.
initial_centroids = kMeansInitCentroids(X, K);

% Run K-Means
[centroids, idx] = runkMeans(X, initial_centroids, max_iters);

fprintf('Program paused. Press enter to continue.\n');
pause;


%% ================= Part 5: Image Compression ======================
%  In this part of the exercise, you will use the clusters of K-Means to
%  compress an image. To do this, we first find the closest cluster for
%  each example. After that, we replace each pixel with the value of the
%  centroid it was assigned to.

fprintf('\nApplying K-Means to compress an image.\n\n');

% Find closest cluster members
idx = findClosestCentroids(X, centroids);

% Essentially, now we have represented the image X in terms of the
% indices in idx.

% We can now recover the image from the indices (idx) by mapping each pixel
% (specified by its index in idx) to the centroid value.
X_recovered = centroids(idx,:);

% Reshape the recovered image into proper dimensions
X_recovered = reshape(X_recovered, img_size(1), img_size(2), 3);

% Display the original image
subplot(1, 2, 1);
imagesc(A);
title('Original');

% Display compressed image side by side
subplot(1, 2, 2);
imagesc(X_recovered)
title(sprintf('Compressed, with %d colors.', K));


fprintf('Program paused. Press enter to continue.\n');
pause;
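ex7.m also calls findClosestCentroids.m, kMeansInitCentroids.m and runkMeans.m, which are not shown in this view of the commit; the first two are functions the student must complete. A minimal sketch of those two follows, with signatures taken from the calls above and bodies that are illustrative only, not part of the commit:

% findClosestCentroids.m (sketch, illustrative only)
function idx = findClosestCentroids(X, centroids)
    % Assign each example to the index of its nearest centroid
    % (squared Euclidean distance)
    m = size(X, 1);
    idx = zeros(m, 1);
    for i = 1:m
        dists = sum(bsxfun(@minus, centroids, X(i, :)) .^ 2, 2);  % distance to each centroid
        [~, idx(i)] = min(dists);
    end
end

% kMeansInitCentroids.m (sketch, illustrative only)
function centroids = kMeansInitCentroids(X, K)
    % Initialize centroids to K randomly chosen examples from X
    randidx = randperm(size(X, 1));
    centroids = X(randidx(1:K), :);
end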
