
ex1_multi.m 6.3 kB

%% Machine Learning Online Class
% Exercise 1: Linear regression with multiple variables
%
% Instructions
% ------------
%
% This file contains code that helps you get started on the
% linear regression exercise.
%
% You will need to complete the following functions in this
% exercise:
%
%   warmUpExercise.m
%   plotData.m
%   gradientDescent.m
%   computeCost.m
%   gradientDescentMulti.m
%   computeCostMulti.m
%   featureNormalize.m
%   normalEqn.m
%
% For this part of the exercise, you will need to change some
% parts of the code below for various experiments (e.g., changing
% learning rates).
%

%% Initialization

%% ================ Part 1: Feature Normalization ================

%% Clear and Close Figures
clear ; close all; clc

fprintf('Loading data ...\n');

%% Load Data
data = load('ex1data2.txt');
X = data(:, 1:2);
y = data(:, 3);
m = length(y);

% Print out some data points
fprintf('First 10 examples from the dataset: \n');
fprintf(' x = [%.0f %.0f], y = %.0f \n', [X(1:10,:) y(1:10,:)]');

fprintf('Program paused. Press enter to continue.\n');
pause;

% Scale features and set them to zero mean
fprintf('Normalizing Features ...\n');
[X, mu, sigma] = featureNormalize(X);

% Add intercept term to X
X = [ones(m, 1) X];
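
% For reference, a minimal featureNormalize sketch (assuming the course
% spec of zero mean and unit standard deviation per column; bsxfun keeps
% it compatible with MATLAB versions without implicit broadcasting):
%
%   function [X_norm, mu, sigma] = featureNormalize(X)
%     mu = mean(X);      % 1 x n row of column means
%     sigma = std(X);    % 1 x n row of column standard deviations
%     X_norm = bsxfun(@rdivide, bsxfun(@minus, X, mu), sigma);
%   end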

%% ================ Part 2: Gradient Descent ================

% ====================== YOUR CODE HERE ======================
% Instructions: We have provided you with the following starter
%               code that runs gradient descent with a particular
%               learning rate (alpha).
%
%               Your task is to first make sure that your functions -
%               computeCost and gradientDescent already work with
%               this starter code and support multiple variables.
%
%               After that, try running gradient descent with
%               different values of alpha and see which one gives
%               you the best result; a sketch of such a comparison
%               follows this comment block.
%
%               Finally, you should complete the code at the end
%               to predict the price of a 1650 sq-ft, 3 br house.
%
% Hint: By using the 'hold on' command, you can plot multiple
%       graphs on the same figure.
%
% Hint: At prediction, make sure you do the same feature normalization.
%
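% For example, a sketch for comparing learning rates on one figure
% (the alpha values and iteration count here are arbitrary
% illustrations, not required settings):
%
%   alphas = [0.3 0.1 0.03 0.01];
%   figure; hold on;
%   for k = 1:numel(alphas)
%     [~, Jh] = gradientDescentMulti(X, y, zeros(3, 1), alphas(k), 50);
%     plot(1:numel(Jh), Jh, 'LineWidth', 2);
%   end
%   legend('0.3', '0.1', '0.03', '0.01');
%   xlabel('Number of iterations'); ylabel('Cost J');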

fprintf('Running gradient descent ...\n');

% Choose some alpha value
alpha = 0.001;
num_iters = 4000;

% Init Theta and Run Gradient Descent
theta = zeros(3, 1);
[theta, J_history] = gradientDescentMulti(X, y, theta, alpha, num_iters);
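
% For reference, the vectorized update loop inside gradientDescentMulti
% is expected to look roughly like this sketch (a standard batch
% gradient-descent step, not necessarily this file's exact solution):
%
%   for iter = 1:num_iters
%     theta = theta - (alpha / m) * (X' * (X * theta - y));
%     J_history(iter) = computeCostMulti(X, y, theta);
%   end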

% Plot the convergence graph
figure;
plot(1:numel(J_history), J_history, '-b', 'LineWidth', 2);
xlabel('Number of iterations');
ylabel('Cost J');

% Display gradient descent's result
fprintf('Theta computed from gradient descent: \n');
fprintf(' %f \n', theta);
fprintf('\n');

% Estimate the price of a 1650 sq-ft, 3 br house
% ====================== YOUR CODE HERE ======================
% Recall that the first column of X is all-ones. Thus, it does
% not need to be normalized.
% Normalize the query with the same mu and sigma used in training,
% then prepend the intercept term before applying theta.
price = [1, ([1650, 3] - mu) ./ sigma] * theta;
% ============================================================

fprintf(['Predicted price of a 1650 sq-ft, 3 br house ' ...
         '(using gradient descent):\n $%f\n'], price);

fprintf('Program paused. Press enter to continue.\n');
pause;

%% ================ Part 3: Normal Equations ================

fprintf('Solving with normal equations...\n');

% ====================== YOUR CODE HERE ======================
% Instructions: The following code computes the closed form
%               solution for linear regression using the normal
%               equations. You should complete the code in
%               normalEqn.m
%
%               After doing so, you should complete this code
%               to predict the price of a 1650 sq-ft, 3 br house.
%

%% Load Data
data = csvread('ex1data2.txt');
X = data(:, 1:2);
y = data(:, 3);
m = length(y);

% Add intercept term to X
X = [ones(m, 1) X];

% Calculate the parameters from the normal equation
theta = normalEqn(X, y);
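
% For reference, a minimal normalEqn sketch (the closed-form
% least-squares solution; pinv is used so it also handles a
% singular X' * X):
%
%   function theta = normalEqn(X, y)
%     theta = pinv(X' * X) * X' * y;
%   end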

% Display normal equation's result
fprintf('Theta computed from the normal equations: \n');
fprintf(' %f \n', theta);
fprintf('\n');

% Estimate the price of a 1650 sq-ft, 3 br house
% ====================== YOUR CODE HERE ======================
% No normalization here: this X (and hence this theta) uses the raw features.
price = [1, 1650, 3] * theta;
% ============================================================

fprintf(['Predicted price of a 1650 sq-ft, 3 br house ' ...
         '(using normal equations):\n $%f\n'], price);

%% ============= Part 4: Visualizing J(theta_0, theta_1) =============
fprintf('Visualizing J(theta_0, theta_1) ...\n')

% Note: this visualization is carried over from the single-variable
% exercise. To make it run with three parameters, theta_2 is held
% fixed at its normal-equation value while theta_0 and theta_1 vary.
% With the raw (unnormalized) features of ex1data2.txt, J is very
% large over this grid, so the plots are mainly illustrative.

% Grid over which we will calculate J
theta0_vals = linspace(-10, 10, 100);  % 100 evenly spaced values from -10 to 10
theta1_vals = linspace(-1, 4, 100);    % 100 evenly spaced values from -1 to 4

% Initialize J_vals to a matrix of 0's
J_vals = zeros(length(theta0_vals), length(theta1_vals));

% Fill out J_vals
for i = 1:length(theta0_vals)
  for j = 1:length(theta1_vals)
    t = [theta0_vals(i); theta1_vals(j); theta(3)];
    J_vals(i,j) = computeCostMulti(X, y, t);
  end
end

% Because of the way meshgrids work in the surf command, we need to
% transpose J_vals before calling surf, or else the axes will be flipped
J_vals = J_vals';

% Surface plot
figure;
surf(theta0_vals, theta1_vals, J_vals)  % draw the 3-D surface
xlabel('\theta_0'); ylabel('\theta_1');

% Contour plot
figure;
% Plot J_vals as 20 contours spaced logarithmically between 0.01 and 1000
contour(theta0_vals, theta1_vals, J_vals, logspace(-2, 3, 20))
xlabel('\theta_0'); ylabel('\theta_1');
hold on;
plot(theta(1), theta(2), 'rx', 'MarkerSize', 10, 'LineWidth', 2);
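
% For reference, a vectorized computeCostMulti sketch (the standard
% least-squares cost J = (1/(2m)) * (X*theta - y)' * (X*theta - y)):
%
%   function J = computeCostMulti(X, y, theta)
%     m = length(y);
%     err = X * theta - y;
%     J = (err' * err) / (2 * m);
%   end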

Machine Learning
