@@ -147,44 +147,34 @@ <h2 class="chapter-subheading">Advanced</h2>
       <div class="mt-4">
         <span class="dark">Support Vector Machine (SVM):</span> Visualize the decision boundary and support vectors.
         <div class="code-block">
-          <pre><code class="language-python">import numpy as np
+          <pre><code class="language-python">import numpy as np
 import matplotlib.pyplot as plt
-from sklearn import datasets
-from sklearn.svm import SVC
 
-# Generate dataset
-X, y = datasets.make_blobs(n_samples=100, centers=2, random_state=6)
-y = 2 * y - 1  # Convert labels to -1, 1 for visualization
+# Select the first two features for visualization
+X_vis = X[:, :2]
 
-# Train an SVM classifier
-svm_model = SVC(kernel='linear')
-svm_model.fit(X, y)
+# Find the min and max of both feature columns (padded by 1 for the plot)
+x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1
+y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
 
-# Get the coefficients and intercept
-w = svm_model.coef_[0]
-b = svm_model.intercept_[0]
+# Construct a meshgrid - a grid of (x, y) coordinates covering the plot area
+h = 0.01  # Step size between grid points
+x_coordinates, y_coordinates = np.arange(x_min, x_max, h), np.arange(y_min, y_max, h)
+xx, yy = np.meshgrid(x_coordinates, y_coordinates)
 
 # Decision boundary
-x_values = np.linspace(X[:, 0].min() - 1, X[:, 0].max() + 1, 100)
-decision_boundary = -(w[0] / w[1]) * x_values - b / w[1]
-
-# Margins
-margin = 1 / np.sqrt(np.sum(svm_model.coef_ ** 2))
-margin_upper = decision_boundary + margin
-margin_lower = decision_boundary - margin
-
-# Plot
-plt.scatter(X[:, 0], X[:, 1], c=y, cmap='coolwarm', s=30, edgecolors='k', label="Training Points")
-plt.plot(x_values, decision_boundary, 'k-', label='Decision Boundary')
-plt.plot(x_values, margin_upper, 'k--', label='Margin Upper')
-plt.plot(x_values, margin_lower, 'k--', label='Margin Lower')
-plt.scatter(svm_model.support_vectors_[:, 0], svm_model.support_vectors_[:, 1], s=100, facecolors='none', edgecolors='k', label='Support Vectors')
-
-plt.title("SVM Decision Boundary")
-plt.xlabel("Feature 1")
-plt.ylabel("Feature 2")
-plt.legend()
-plt.show()</code></pre>
+x_1d, y_1d = xx.ravel(), yy.ravel()  # Flatten the 2D grids to 1D arrays
+values_1d = np.c_[x_1d, y_1d]  # Stack into (n_points, 2) coordinate pairs
+Z = model.decision_function(values_1d)  # Signed distance of each grid point from the hyperplane
+Z = Z.reshape(xx.shape)
+
+plt.scatter(X_vis[:, 0], X_vis[:, 1], c=y, cmap='viridis', alpha=0.7)
+plt.contourf(xx, yy, Z, levels=[-1, 0, 1], colors='c', alpha=0.5)  # Shade the margin band
+plt.contour(xx, yy, Z, levels=[-1, 0, 1], colors='r', alpha=0.5, linestyles=['--', '-', '--'])  # Dashed margins, solid boundary
+plt.xlabel('Feature 1')
+plt.ylabel('Feature 2')
+plt.title('SVM Decision Boundary')
+plt.show()</code></pre>
         </div>
         <div class="img-div">
           <img src="{% static 'learn/img/svm_decision_boundary.png' %}" alt="SVM Decision Boundary" style="width:500px;">
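Note: the updated snippet reads X, y, and an already fitted model, none of which are defined in this hunk; they presumably come from an earlier code block on the same page. A minimal sketch of one possible setup follows, assuming the blobs dataset from the removed code and a linear SVC; the names and parameters are illustrative, not taken from this diff. The classifier has to be fitted on the same two features the meshgrid spans, otherwise model.decision_function(values_1d) would receive the wrong number of columns.

    import numpy as np
    from sklearn import datasets
    from sklearn.svm import SVC

    # Assumed setup (not part of this hunk): a two-class dataset and a linear SVM
    # fitted on the two features that the visualization grid covers.
    X, y = datasets.make_blobs(n_samples=100, centers=2, random_state=6)  # assumed dataset
    model = SVC(kernel='linear')  # assumed kernel
    model.fit(X[:, :2], y)  # fit on the same two features as X_vis and the meshgrid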