from sklearn import datasets
from sklearn.linear_model import LogisticRegression
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.colors import ListedColormap

# Load the iris dataset and inspect its structure
iris = datasets.load_iris()
print(list(iris.keys()))
print(iris.DESCR)
| X = iris["data"][:, 3:] # petal width | |||||
| y = (iris["target"] == 2).astype(np.int) # 1 if Iris-Virginica, else 0 | |||||
| log_reg = LogisticRegression(solver="liblinear", random_state=42) | |||||
| log_reg.fit(X, y) | |||||
# Estimate probabilities over a fine grid of petal widths and locate the
# first width at which the estimated P(Iris-Virginica) reaches 50%
X_new = np.linspace(0, 3, 1000).reshape(-1, 1)
y_proba = log_reg.predict_proba(X_new)
decision_boundary = X_new[y_proba[:, 1] >= 0.5][0, 0]
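
# Quick sanity check (illustrative sketch, not part of the original script):
# the boundary should sit at roughly 1.6 cm petal width for this fit, and
# predictions should flip from 1 to 0 on either side of it.
print("Decision boundary:", decision_boundary)
print(log_reg.predict([[1.7], [1.5]]))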
# Plot the estimated probabilities and the decision boundary
plt.figure(figsize=(8, 3))
plt.plot(X[y == 0], y[y == 0], "bs")
plt.plot(X[y == 1], y[y == 1], "g^")
plt.plot([decision_boundary, decision_boundary], [-1, 2], "k:", linewidth=2)
plt.plot(X_new, y_proba[:, 1], "g-", linewidth=2, label="Iris-Virginica")
plt.plot(X_new, y_proba[:, 0], "b--", linewidth=2, label="Not Iris-Virginica")
plt.text(decision_boundary + 0.02, 0.15, "Decision boundary", fontsize=14, color="k", ha="center")
plt.arrow(decision_boundary, 0.08, -0.3, 0, head_width=0.05, head_length=0.1, fc='b', ec='b')
plt.arrow(decision_boundary, 0.92, 0.3, 0, head_width=0.05, head_length=0.1, fc='g', ec='g')
plt.xlabel("Petal width (cm)", fontsize=14)
plt.ylabel("Probability", fontsize=14)
plt.legend(loc="center left", fontsize=14)
plt.axis([0, 3, -0.02, 1.02])
| X = iris["data"][:, (2, 3)] # petal length, petal width | |||||
| y = iris["target"] | |||||
| softmax_reg = LogisticRegression(multi_class="multinomial",solver="lbfgs", C=10, random_state=42) | |||||
| softmax_reg.fit(X, y) | |||||
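
# Quick check (illustrative sketch, not part of the original script): a flower
# with 5 cm long, 2 cm wide petals should be classified as Iris-Virginica
# (class 2); the exact probabilities depend on the fit.
print(softmax_reg.predict([[5, 2]]))
print(softmax_reg.predict_proba([[5, 2]]).round(2))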
# Evaluate class predictions and probabilities on a dense grid covering
# the feature space, for plotting the decision regions
x0, x1 = np.meshgrid(
    np.linspace(0, 8, 500).reshape(-1, 1),
    np.linspace(0, 3.5, 200).reshape(-1, 1),
)
X_new = np.c_[x0.ravel(), x1.ravel()]

y_proba = softmax_reg.predict_proba(X_new)
y_predict = softmax_reg.predict(X_new)

zz1 = y_proba[:, 1].reshape(x0.shape)  # P(Iris-Versicolor) for the contour lines
zz = y_predict.reshape(x0.shape)       # predicted class for the filled regions
# Plot the training points, the filled decision regions, and
# iso-probability contours for the Iris-Versicolor class
plt.figure(figsize=(10, 4))
plt.plot(X[y == 2, 0], X[y == 2, 1], "g^", label="Iris-Virginica")
plt.plot(X[y == 1, 0], X[y == 1, 1], "bs", label="Iris-Versicolor")
plt.plot(X[y == 0, 0], X[y == 0, 1], "yo", label="Iris-Setosa")

custom_cmap = ListedColormap(['#fafab0', '#9898ff', '#a0faa0'])

plt.contourf(x0, x1, zz, cmap=custom_cmap)
contour = plt.contour(x0, x1, zz1, cmap=plt.cm.brg)
plt.clabel(contour, inline=1, fontsize=12)
plt.xlabel("Petal length", fontsize=14)
plt.ylabel("Petal width", fontsize=14)
plt.legend(loc="center left", fontsize=14)
plt.axis([0, 7, 0, 3.5])
plt.show()
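
# Rough overall check (sketch, not part of the original script): training-set
# accuracy of the softmax model. This is resubstitution accuracy on the data
# it was fit on, not a proper held-out evaluation.
print("Softmax training accuracy:", softmax_reg.score(X, y))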