diff --git a/numpy_questions.py b/numpy_questions.py
index 21fcec4b..340f573e 100644
--- a/numpy_questions.py
+++ b/numpy_questions.py
@@ -42,6 +42,12 @@ def max_index(X):
     i = 0
     j = 0
 
     # TODO
+    if not isinstance(X, np.ndarray):
+        raise ValueError("Input must be a numpy array.")
+    if X.ndim != 2:
+        raise ValueError("Input must be 2D.")
+    flat_idx = np.argmax(X)
+    i, j = np.unravel_index(flat_idx, X.shape)
 
     return i, j
@@ -64,4 +70,13 @@ def wallis_product(n_terms):
     """
     # XXX : The n_terms is an int that corresponds to the number of
     # terms in the product. For example 10000.
-    return 0.
+    if not isinstance(n_terms, int) or n_terms < 0:
+        raise ValueError("n_terms must be a non-negative integer.")
+
+    product = 1.0
+    for n in range(1, n_terms + 1):
+        numerator = 4 * n * n
+        product *= numerator / (numerator - 1)
+    # With n_terms == 0 the loop never runs and the order-0
+    # approximation of pi, 2.0, is returned.
+    return 2.0 * product
diff --git a/sklearn_questions.py b/sklearn_questions.py
index f65038c6..a4affd43 100644
--- a/sklearn_questions.py
+++ b/sklearn_questions.py
@@ -28,16 +28,27 @@ from sklearn.utils.multiclass import check_classification_targets
 
 
-class OneNearestNeighbor(BaseEstimator, ClassifierMixin):
+class OneNearestNeighbor(ClassifierMixin, BaseEstimator):
     "OneNearestNeighbor classifier."
 
     def __init__(self):  # noqa: D107
         pass
 
     def fit(self, X, y):
-        """Write docstring.
-
-        And describe parameters
+        """
+        Fit the OneNearestNeighbor classifier.
+
+        Parameters
+        ----------
+        X : array-like of shape (n_samples, n_features)
+            Training samples with one row per example.
+        y : array-like of shape (n_samples,)
+            Target labels aligned with rows of `X`.
+
+        Returns
+        -------
+        self : OneNearestNeighbor
+            Fitted estimator.
         """
         X, y = check_X_y(X, y)
         check_classification_targets(y)
@@ -45,12 +56,23 @@ def fit(self, X, y):
         self.n_features_in_ = X.shape[1]
 
         # XXX fix
+        self._fit_X = X
+        self._fit_y = y
         return self
 
     def predict(self, X):
-        """Write docstring.
+        """
+        Predict class labels for the provided samples.
+
+        Parameters
+        ----------
+        X : array-like of shape (n_samples, n_features)
+            Samples to classify.
 
-        And describe parameters
+        Returns
+        -------
+        y_pred : ndarray of shape (n_samples,)
+            Predicted class labels.
         """
         check_is_fitted(self)
         X = check_array(X)
@@ -60,15 +82,44 @@ def predict(self, X):
         )
 
         # XXX fix
+
+        if X.shape[1] != self.n_features_in_:
+            raise ValueError(
+                f"X has {X.shape[1]} features, but "
+                f"{self.__class__.__name__} is expecting "
+                f"{self.n_features_in_} features as input"
+            )
+
+        # Broadcast to an (n_test, n_train) matrix of Euclidean distances,
+        # then pick each test sample's nearest training sample.
+        distances = np.linalg.norm(
+            self._fit_X[None, :, :] - X[:, None, :], axis=2
+        )
+        nearest_idx = np.argmin(distances, axis=1)
+        y_pred[:] = self._fit_y[nearest_idx]
         return y_pred
 
     def score(self, X, y):
-        """Write docstring.
-
-        And describe parameters
+        """
+        Evaluate the classifier on labeled data.
+
+        Parameters
+        ----------
+        X : array-like of shape (n_samples, n_features)
+            Samples on which to evaluate.
+        y : array-like of shape (n_samples,)
+            True labels corresponding to `X`.
+
+        Returns
+        -------
+        score : float
+            Fraction of correctly classified samples.
         """
         X, y = check_X_y(X, y)
         y_pred = self.predict(X)
 
         # XXX fix
+        # Per-sample accuracy contributions; summing them below gives
+        # the fraction of correct predictions.
+        y_pred = (y_pred == y).astype(float) / len(y_pred)
        return y_pred.sum()
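
The nearest-neighbour lookup in predict builds the full matrix of pairwise Euclidean distances through NumPy broadcasting. The standalone sketch below illustrates that shape arithmetic; the array sizes and the names train and test are illustrative only and do not appear in the patch.

    import numpy as np

    rng = np.random.RandomState(0)
    train = rng.randn(5, 3)  # 5 stored training samples, 3 features
    test = rng.randn(2, 3)   # 2 query samples, 3 features

    # (1, 5, 3) minus (2, 1, 3) broadcasts to (2, 5, 3): one difference
    # vector per (query, training) pair. Taking the norm over the feature
    # axis yields the (2, 5) matrix of Euclidean distances.
    diff = train[None, :, :] - test[:, None, :]
    distances = np.linalg.norm(diff, axis=2)
    assert distances.shape == (2, 5)

    # Row-wise argmin gives, for each query, the index of its nearest
    # training sample, which predict then uses to look up the label.
    nearest_idx = np.argmin(distances, axis=1)
    assert nearest_idx.shape == (2,)

Note that this materializes an (n_test, n_train, n_features) intermediate array, so memory grows with all three sizes; that is acceptable for data of this assignment's scale.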
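
A minimal smoke test of the patched functions, assuming numpy and scikit-learn are installed and the patched numpy_questions.py and sklearn_questions.py are importable from the working directory; the toy data below is made up for illustration and is not part of the repository's test suite.

    import numpy as np

    from numpy_questions import max_index, wallis_product
    from sklearn_questions import OneNearestNeighbor

    # max_index returns the (row, column) position of the largest entry.
    X = np.array([[0.0, 2.0], [5.0, 1.0]])
    assert max_index(X) == (1, 0)

    # wallis_product approximates pi; more terms tighten the approximation.
    assert abs(wallis_product(100000) - np.pi) < 1e-4

    # Each training point is its own nearest neighbour, so a 1-NN
    # classifier reproduces the training labels exactly.
    rng = np.random.RandomState(0)
    X_train = rng.randn(20, 2)
    y_train = (X_train[:, 0] > 0).astype(int)
    clf = OneNearestNeighbor().fit(X_train, y_train)
    assert np.array_equal(clf.predict(X_train), y_train)
    print("all checks passed")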