 ]

 classifiers = [
-  KNeighborsClassifier.(3),
-  SVC.(kernel: 'linear', C: 0.025),
-  SVC.(gamma: 2, C: 1),
-  DecisionTreeClassifier.(max_depth: 5),
-  RandomForestClassifier.(max_depth: 5, n_estimators: 10, max_features: 1),
-  AdaBoostClassifier.(),
-  GaussianNB.(),
-  LinearDiscriminantAnalysis.(),
-  QuadraticDiscriminantAnalysis.()
+  KNeighborsClassifier.new(3),
+  SVC.new(kernel: 'linear', C: 0.025),
+  SVC.new(gamma: 2, C: 1),
+  DecisionTreeClassifier.new(max_depth: 5),
+  RandomForestClassifier.new(max_depth: 5, n_estimators: 10, max_features: 1),
+  AdaBoostClassifier.new(),
+  GaussianNB.new(),
+  LinearDiscriminantAnalysis.new(),
+  QuadraticDiscriminantAnalysis.new()
 ]

-x, y = make_classification.(
+x, y = *make_classification(
   n_features: 2,
   n_redundant: 0,
   n_informative: 2,
   random_state: 1,
   n_clusters_per_class: 1
 )

-np.random.seed.(42)
-x += 2 * np.random.random_sample.(x.shape)
-linearly_separable = PyCall.tuple(x, y)
+np.random.seed(42)
+x += 2 * np.random.random_sample(x.shape)
+linearly_separable = PyCall.tuple([x, y]) # FIXME: allow PyCall.tuple(x, y)

 datasets = [
-  make_moons.(noise: 0.3, random_state: 0),
-  make_circles.(noise: 0.2, factor: 0.5, random_state: 1),
+  make_moons(noise: 0.3, random_state: 0),
+  make_circles(noise: 0.2, factor: 0.5, random_state: 1),
   linearly_separable
 ]

-fig = plt.figure.(figsize: PyCall.tuple(27, 9))
+fig = plt.figure(figsize: [27, 9])
 i = 1
-all = PyCall.slice(nil)
+all = PyCall::Slice.all
 datasets.each do |ds|
-  x, y = ds
-  x = StandardScaler.().fit_transform.(x)
-  x_train, x_test, y_train, y_test = train_test_split.(x, y, test_size: 0.4)
+  x, y = *ds
+  x = StandardScaler.new.fit_transform(x)
+  x_train, x_test, y_train, y_test = train_test_split(x, y, test_size: 0.4)

-  x_min, x_max = np.min.(x[all, 0]) - 0.5, np.max.(x[all, 0]) + 0.5
-  y_min, y_max = np.min.(x[all, 1]) - 0.5, np.max.(x[all, 1]) + 0.5
+  x_min, x_max = np.min(x[all, 0]) - 0.5, np.max(x[all, 0]) + 0.5
+  y_min, y_max = np.min(x[all, 1]) - 0.5, np.max(x[all, 1]) + 0.5

-  xx, yy = np.meshgrid.(
-    np.linspace.(x_min, x_max, ((x_max - x_min)/h).round),
-    np.linspace.(y_min, y_max, ((y_max - y_min)/h).round),
+  xx, yy = np.meshgrid(
+    np.linspace(x_min, x_max, ((x_max - x_min)/h).round),
+    np.linspace(y_min, y_max, ((y_max - y_min)/h).round),
   )
-  mesh_points = np.dstack.(PyCall.tuple(xx.ravel.(), yy.ravel.()))[0, all, all]
+  mesh_points = np.dstack(PyCall.tuple([xx.ravel(), yy.ravel()]))[0, all, all]

   # just plot the dataset first
-  cm = plt.cm.RdBu
-  cm_bright = mplc.ListedColormap.(["#FF0000", "#0000FF"])
-  ax = plt.subplot.(datasets.length, classifiers.length + 1, i)
+  cm = plt.cm.__dict__[:RdBu]
+  cm_bright = mplc.ListedColormap.new(["#FF0000", "#0000FF"])
+  ax = plt.subplot(datasets.length, classifiers.length + 1, i)
   # plot the training points
-  ax.scatter.(x_train[all, 0], x_train[all, 1], c: y_train, cmap: cm_bright)
+  ax.scatter(x_train[all, 0], x_train[all, 1], c: y_train, cmap: cm_bright)
   # and testing points
-  ax.scatter.(x_test[all, 0], x_test[all, 1], c: y_test, cmap: cm_bright, alpha: 0.6)
+  ax.scatter(x_test[all, 0], x_test[all, 1], c: y_test, cmap: cm_bright, alpha: 0.6)

-  ax.set_xlim.(np.min.(xx), np.max.(xx))
-  ax.set_ylim.(np.min.(yy), np.max.(yy))
-  ax.set_xticks.(PyCall.tuple())
-  ax.set_yticks.(PyCall.tuple())
+  ax.set_xlim(np.min(xx), np.max(xx))
+  ax.set_ylim(np.min(yy), np.max(yy))
+  ax.set_xticks(PyCall.tuple())
+  ax.set_yticks(PyCall.tuple())
   i += 1

   # iterate over classifiers
   names.zip(classifiers).each do |name, clf|
-    ax = plt.subplot.(datasets.length, classifiers.length + 1, i)
-    clf.fit.(x_train, y_train)
-    scor = clf.score.(x_test, y_test)
+    ax = plt.subplot(datasets.length, classifiers.length + 1, i)
+    clf.fit(x_train, y_train)
+    scor = clf.score(x_test, y_test)

     # Plot the decision boundary. For that, we will assign a color to each
     # point in the mesh [x_min, x_max]x[y_min, y_max]
     begin
       # not implemented for some
-      z = clf.decision_function.(mesh_points)
+      z = clf.decision_function(mesh_points)
     rescue
-      z = clf.predict_proba.(mesh_points)[all, 1]
+      z = clf.predict_proba(mesh_points)[all, 1]
     end

     # Put the result into a color plot
-    z = z.reshape.(xx.shape)
-    ax.contourf.(xx, yy, z, cmap: cm, alpha: 0.8)
+    z = z.reshape(xx.shape)
+    ax.contourf(xx, yy, z, cmap: cm, alpha: 0.8)

     # Plot also the training points
-    ax.scatter.(x_train[all, 0], x_train[all, 1], c: y_train, cmap: cm_bright)
+    ax.scatter(x_train[all, 0], x_train[all, 1], c: y_train, cmap: cm_bright)
     # and testing points
-    ax.scatter.(x_test[all, 0], x_test[all, 1], c: y_test, cmap: cm_bright, alpha: 0.6)
+    ax.scatter(x_test[all, 0], x_test[all, 1], c: y_test, cmap: cm_bright, alpha: 0.6)

-    ax.set_xlim.(np.min.(xx), np.max.(xx))
-    ax.set_ylim.(np.min.(yy), np.max.(yy))
-    ax.set_xticks.(PyCall.tuple())
-    ax.set_yticks.(PyCall.tuple())
-    ax.set_title.(name)
+    ax.set_xlim(np.min(xx), np.max(xx))
+    ax.set_ylim(np.min(yy), np.max(yy))
+    ax.set_xticks(PyCall.tuple())
+    ax.set_yticks(PyCall.tuple())
+    ax.set_title(name)

-    ax.text.(np.max.(xx) - 0.3, np.min.(yy) + 0.3, "%.2f" % scor, size: 15, horizontalalignment: 'right')
+    ax.text(np.max(xx) - 0.3, np.min(yy) + 0.3, "%.2f" % scor, size: 15, horizontalalignment: 'right')

     i += 1
   end
 end

-fig.subplots_adjust.(left: 0.02, right: 0.98)
-plt.show.()
+fig.subplots_adjust(left: 0.02, right: 0.98)
+plt.show()
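
For reference, a minimal standalone sketch of the PyCall calling convention that the new code uses: Python functions become plain Ruby method calls, Python constructors are invoked with `.new`, and Python keyword arguments are passed as Ruby keyword arguments. The `require`/`pyfrom` lines below are assumptions about how the surrounding file imports scikit-learn; they are illustrative only and not part of the committed hunk.

require 'pycall/import'
include PyCall::Import

# Assumed imports (not shown in this hunk); pyfrom mirrors Python's "from ... import ...".
pyfrom 'sklearn.datasets',  import: :make_classification
pyfrom 'sklearn.neighbors', import: :KNeighborsClassifier

# Python keyword arguments map directly onto Ruby keyword arguments.
x, y = *make_classification(
  n_features: 2, n_redundant: 0, n_informative: 2,
  random_state: 1, n_clusters_per_class: 1
)

clf = KNeighborsClassifier.new(3)  # Python __init__ via .new, not KNeighborsClassifier.(3)
clf.fit(x, y)                      # plain method call, no .() proxy-call syntax
puts clf.score(x, y)               # mean accuracy on the same data, just to show the round trip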