diff --git a/aeon/anomaly_detection/_cblof.py b/aeon/anomaly_detection/_cblof.py
index 506974f6ca..64e84d9679 100644
--- a/aeon/anomaly_detection/_cblof.py
+++ b/aeon/anomaly_detection/_cblof.py
@@ -16,15 +16,16 @@ class CBLOF(PyODAdapter):
 
     This class implements the CBLOF algorithm for anomaly detection using
     PyODAdadpter to be used in the aeon framework. All parameters are passed to
-    the PyOD model ``CBLOF`` except for `window_size` and `stride`, which are used to
-    construct the sliding windows.
+    the PyOD model ``CBLOF`` except for ``window_size`` and ``stride``,
+    which are used to construct the sliding windows.
 
     The documentation for parameters has been adapted from the
-    [PyOD documentation](https://pyod.readthedocs.io/en/latest/pyod.models.html#id117).
-    Here, `X` refers to the set of sliding windows extracted from the time series
+    `PyOD documentation <https://pyod.readthedocs.io/en/latest/pyod.models.html#id117>`_
+
+    Here, ``X`` refers to the set of sliding windows extracted from the time series
     using :func:`aeon.utils.windowing.sliding_windows` with the parameters
-    ``window_size`` and ``stride``. The internal `X` has the shape
-    `(n_windows, window_size * n_channels)`.
+    ``window_size`` and ``stride``. The internal ``X`` has the shape
+    ``(n_windows, window_size * n_channels)``.
 
     Parameters
     ----------
@@ -52,21 +53,21 @@ class CBLOF(PyODAdapter):
 
     beta : int or float in (1,), default=5
         Coefficient for deciding small and large clusters. For a list
-        sorted clusters by size `|C1|, \|C2|, ..., |Cn|, beta = |Ck|/|Ck-1|`
+        sorted clusters by size ``|C1|, |C2|, ..., |Cn|, beta = |Ck|/|Ck-1|``
 
     use_weights : bool, default=False
-        If set to True, the size of clusters are used as weights in
+        If set to ``True``, the size of clusters are used as weights in
         outlier score calculation.
 
     check_estimator : bool, default=False
-        If set to True, check whether the base estimator is consistent with
+        If set to ``True``, check whether the base estimator is consistent with
         sklearn standard.
 
     random_state : int, np.RandomState or None, default=None
-        If int, random_state is the seed used by the random
+        If ``int``, random_state is the seed used by the random
         number generator; If RandomState instance, random_state is the random
-        number generator; If None, the random number generator is the
-        RandomState instance used by `np.random`.
+        number generator; If ``None``, the random number generator is the
+        RandomState instance used by ``np.random``.
 
     window_size : int, default=10
         Size of the sliding window.
@@ -135,15 +136,16 @@ def _get_test_params(cls, parameter_set="default"):
         ----------
         parameter_set : str, default="default"
             Name of the set of test parameters to return, for use in tests. If no
-            special parameters are defined for a value, will return `"default"` set.
+            special parameters are defined for a value, will return ``"default"`` set.
 
         Returns
        -------
         params : dict
             Parameters to create testing instances of the class.
             Each dict are parameters to construct an "interesting" test instance, i.e.,
-            `MyClass(**params)` or `MyClass(**params[i])` creates a valid test instance.
-            `create_test_instance` uses the first (or only) dictionary in `params`.
+            ``MyClass(**params)`` or ``MyClass(**params[i])`` creates a valid test
+            instance. ``create_test_instance`` uses the first (or only) dictionary
+            in ``params``.
""" return { "n_clusters": 4, diff --git a/aeon/anomaly_detection/_copod.py b/aeon/anomaly_detection/_copod.py index ee448b96b8..6c99e8f7ee 100644 --- a/aeon/anomaly_detection/_copod.py +++ b/aeon/anomaly_detection/_copod.py @@ -15,8 +15,8 @@ class COPOD(PyODAdapter): """COPOD for anomaly detection. This class implements the COPOD using PyODAdadpter to be used in the aeon framework. - The parameter `n_jobs` is passed to COPOD model from PyOD, `window_size` and - `stride` are used to construct the sliding windows. + The parameter ``n_jobs`` is passed to COPOD model from PyOD, ``window_size`` and + ``stride`` are used to construct the sliding windows. Parameters ---------- @@ -66,14 +66,15 @@ def _get_test_params(cls, parameter_set="default") -> dict: ---------- parameter_set : str, default="default" Name of the set of test parameters to return, for use in tests. If no - special parameters are defined for a value, will return `"default"` set. + special parameters are defined for a value, will return ``"default"`` set. Returns ------- params : dict or list of dict, default={} Parameters to create testing instances of the class. Each dict are parameters to construct an "interesting" test instance, i.e., - `MyClass(**params)` or `MyClass(**params[i])` creates a valid test instance. - `create_test_instance` uses the first (or only) dictionary in `params`. + ``MyClass(**params)`` or ``MyClass(**params[i])`` creates a valid test + instance. ``create_test_instance`` uses the first (or only) dictionary + in ``params``. """ return {} diff --git a/aeon/anomaly_detection/_dwt_mlead.py b/aeon/anomaly_detection/_dwt_mlead.py index e78bb1d7d9..a0fae5b317 100644 --- a/aeon/anomaly_detection/_dwt_mlead.py +++ b/aeon/anomaly_detection/_dwt_mlead.py @@ -235,14 +235,15 @@ def _get_test_params(cls, parameter_set="default"): ---------- parameter_set : str, default="default" Name of the set of test parameters to return, for use in tests. If no - special parameters are defined for a value, will return `"default"` set. + special parameters are defined for a value, will return ``"default"`` set. Returns ------- params : dict or list of dict, default={} Parameters to create testing instances of the class. Each dict are parameters to construct an "interesting" test instance, i.e., - `MyClass(**params)` or `MyClass(**params[i])` creates a valid test instance. + ``MyClass(**params)`` or ``MyClass(**params[i])`` creates a valid + test instance. """ return { "start_level": 2, diff --git a/aeon/anomaly_detection/_iforest.py b/aeon/anomaly_detection/_iforest.py index a410c3542d..73b39a3118 100644 --- a/aeon/anomaly_detection/_iforest.py +++ b/aeon/anomaly_detection/_iforest.py @@ -16,15 +16,15 @@ class IsolationForest(PyODAdapter): This class implements the Isolation Forest algorithm for anomaly detection using PyODAdadpter to be used in the aeon framework. All parameters are passed to - the PyOD model ``IForest`` except for `window_size` and `stride`, which are used to - construct the sliding windows. + the PyOD model ``IForest`` except for ``window_size`` and ``stride``, + which are used to construct the sliding windows. The documentation for parameters has been adapted from the [PyOD documentation](https://pyod.readthedocs.io/en/latest/pyod.models.html#id405). 
-    Here, `X` refers to the set of sliding windows extracted from the time series
+    Here, ``X`` refers to the set of sliding windows extracted from the time series
     using :func:`aeon.utils.windowing.sliding_windows` with the parameters
-    ``window_size`` and ``stride``. The internal `X` has the shape
-    `(n_windows, window_size * n_channels)`.
+    ``window_size`` and ``stride``. The internal ``X`` has the shape
+    ``(n_windows, window_size * n_channels)``.
 
     Parameters
     ----------
@@ -34,9 +34,9 @@ class IsolationForest(PyODAdapter):
     max_samples : int, float or "auto", default="auto"
         The number of samples to draw from X to train each base estimator.
 
-            - If int, then draw `max_samples` samples.
-            - If float, then draw `max_samples * X.shape[0]` samples.
-            - If "auto", then `max_samples=min(256, n_samples)`.
+            - If ``int``, then draw ``max_samples`` samples.
+            - If ``float``, then draw ``max_samples * X.shape[0]`` samples.
+            - If ``"auto"``, then ``max_samples=min(256, n_samples)``.
 
         If max_samples is larger than the number of samples provided,
         all samples will be used for all trees (no sampling).
@@ -44,24 +44,24 @@ class IsolationForest(PyODAdapter):
 
     max_features : int or float, default=1.0
         The number of features to draw from X to train each base estimator.
 
-            - If int, then draw `max_features` features.
-            - If float, then draw `max_features * X.shape[1]` features.
+            - If ``int``, then draw ``max_features`` features.
+            - If ``float``, then draw ``max_features * X.shape[1]`` features.
 
     bootstrap : bool, default=False
-        If True, individual trees are fit on random subsets of the training
+        If ``True``, individual trees are fit on random subsets of the training
         data sampled with replacement. If False, sampling without replacement
         is performed.
 
     n_jobs : int, default=1
-        The number of jobs to run in parallel for both `fit` and `predict`.
-        If -1, then the number of jobs is set to the number of cores.
+        The number of jobs to run in parallel for both ``fit`` and ``predict``.
+        If ``-1``, then the number of jobs is set to the number of cores.
 
     random_state : int, np.RandomState or None, default=None
-        If int, random_state is the seed used by the random number generator;
+        If ``int``, random_state is the seed used by the random number generator;
         If RandomState instance, random_state is the random number generator;
-        If None, the random number generator is the RandomState instance used
-        by `np.random`.
+        If ``None``, the random number generator is the RandomState instance used
+        by ``np.random``.
 
     verbose : int, default=0
         Controls the verbosity of the tree building process.
@@ -135,14 +135,14 @@ def _get_test_params(cls, parameter_set="default"):
         ----------
         parameter_set : str, default="default"
             Name of the set of test parameters to return, for use in tests. If no
-            special parameters are defined for a value, will return `"default"` set.
+            special parameters are defined for a value, will return ``"default"`` set.
 
         Returns
         -------
         params : dict
             Parameters to create testing instances of the class.
             Each dict are parameters to construct an "interesting" test instance, i.e.,
-            `IsolationForest(**params)` creates a valid test instance.
+            ``IsolationForest(**params)`` creates a valid test instance.
""" return { "n_estimators": 10, diff --git a/aeon/anomaly_detection/_kmeans.py b/aeon/anomaly_detection/_kmeans.py index c114911c3b..dc2d108bd5 100644 --- a/aeon/anomaly_detection/_kmeans.py +++ b/aeon/anomaly_detection/_kmeans.py @@ -168,14 +168,15 @@ def _get_test_params(cls, parameter_set="default"): ---------- parameter_set : str, default="default" Name of the set of test parameters to return, for use in tests. If no - special parameters are defined for a value, will return `"default"` set. + special parameters are defined for a value, will return ``"default"`` set. Returns ------- dict or list of dict, default={} Parameters to create testing instances of the class. Each dict are parameters to construct an "interesting" test instance, i.e., - `MyClass(**params)` or `MyClass(**params[i])` creates a valid test instance. + ``MyClass(**params)`` or ``MyClass(**params[i])`` creates a valid + test instance. """ return { "n_clusters": 5, diff --git a/aeon/anomaly_detection/_lof.py b/aeon/anomaly_detection/_lof.py index 99ac068584..db78a15633 100644 --- a/aeon/anomaly_detection/_lof.py +++ b/aeon/anomaly_detection/_lof.py @@ -19,15 +19,15 @@ class LOF(PyODAdapter): The documentation for parameters has been adapted from the [PyOD documentation](https://pyod.readthedocs.io/en/latest/pyod.models.html#id586). - Here, `X` refers to the set of sliding windows extracted from the time series + Here, ``X`` refers to the set of sliding windows extracted from the time series using :func:`aeon.utils.windowing.sliding_windows` with the parameters - ``window_size`` and ``stride``. The internal `X` has the shape - `(n_windows, window_size * n_channels)`. + ``window_size`` and ``stride``. The internal ``X`` has the shape + ``(n_windows, window_size * n_channels)``. Parameters ---------- n_neighbors : int, optional (default=20) - Number of neighbors to use by default for `kneighbors` queries. + Number of neighbors to use by default for ``kneighbors`` queries. If n_neighbors is larger than the number of samples provided, all samples will be used. algorithm : {'auto', 'ball_tree', 'kd_tree', 'brute'}, optional @@ -36,11 +36,11 @@ class LOF(PyODAdapter): - 'kd_tree' will use KDTree - 'brute' will use a brute-force search. - 'auto' will attempt to decide the most appropriate algorithm - based on the values passed to :meth:`fit` method. + based on the values passed to :meth:``fit`` method. Note: fitting on sparse input will override the setting of this parameter, using brute force. leaf_size : int, optional (default=30) - Leaf size passed to `BallTree` or `KDTree`. This can + Leaf size passed to ``BallTree`` or ``KDTree``. This can affect the speed of the construction and query, as well as the memory required to store the tree. The optimal value depends on the nature of the problem. diff --git a/aeon/anomaly_detection/_merlin.py b/aeon/anomaly_detection/_merlin.py index 5928d156d6..8ec9ecda68 100644 --- a/aeon/anomaly_detection/_merlin.py +++ b/aeon/anomaly_detection/_merlin.py @@ -205,13 +205,14 @@ def _get_test_params(cls, parameter_set="default"): ---------- parameter_set : str, default="default" Name of the set of test parameters to return, for use in tests. If no - special parameters are defined for a value, will return `"default"` set. + special parameters are defined for a value, will return ``"default"`` set. Returns ------- params : dict or list of dict, default={} Parameters to create testing instances of the class. 
            Each dict are parameters to construct an "interesting" test instance, i.e.,
-            `MyClass(**params)` or `MyClass(**params[i])` creates a valid test instance.
+            ``MyClass(**params)`` or ``MyClass(**params[i])`` creates a valid
+            test instance.
         """
         return {"min_length": 4, "max_length": 7}
diff --git a/aeon/anomaly_detection/_one_class_svm.py b/aeon/anomaly_detection/_one_class_svm.py
index 9e654ee326..95177d7225 100644
--- a/aeon/anomaly_detection/_one_class_svm.py
+++ b/aeon/anomaly_detection/_one_class_svm.py
@@ -16,15 +16,15 @@ class OneClassSVM(BaseAnomalyDetector):
 
     This class implements the OneClassSVM algorithm for anomaly detection from
     sklearn to be used in the aeon framework. All parameters are passed to
-    the sklearn ``OneClassSVM`` except for `window_size` and `stride`, which are used to
-    construct the sliding windows.
+    the sklearn ``OneClassSVM`` except for ``window_size`` and ``stride``,
+    which are used to construct the sliding windows.
 
     The documentation for parameters has been adapted from
     (https://scikit-learn.org/dev/modules/generated/sklearn.svm.OneClassSVM.html).
-    Here, `X` refers to the set of sliding windows extracted from the time series
+    Here, ``X`` refers to the set of sliding windows extracted from the time series
     using :func:`aeon.utils.windowing.sliding_windows` with the parameters
-    ``window_size`` and ``stride``. The internal `X` has the shape
-    `(n_windows, window_size * n_channels)`.
+    ``window_size`` and ``stride``. The internal ``X`` has the shape
+    ``(n_windows, window_size * n_channels)``.
 
     Parameters
     ----------
@@ -43,8 +43,8 @@ class OneClassSVM(BaseAnomalyDetector):
 
         - if ``gamma='scale'`` (default) is passed then it uses
          1 / (n_features * X.var()) as value of gamma,
-        - if 'auto', uses 1 / n_features
-        - if float, must be non-negative.
+        - if ``"auto"``, uses 1 / n_features
+        - if ``float``, must be non-negative.
 
         .. versionchanged:: 0.22
            The default value of ``gamma`` changed from 'auto' to 'scale'.
diff --git a/aeon/anomaly_detection/_pyodadapter.py b/aeon/anomaly_detection/_pyodadapter.py
index c520cc6f19..d085c54fee 100644
--- a/aeon/anomaly_detection/_pyodadapter.py
+++ b/aeon/anomaly_detection/_pyodadapter.py
@@ -40,9 +40,9 @@ class PyODAdapter(BaseAnomalyDetector):
     target time series with the same number of dimensions. The reference (or training)
     time series does not need to be clean for most PyOD models. However, knowledge in
     form of anomaly labels about the potential existing anomalies in the reference time
-    series are not used during the fitting process. Use `fit` to fit the model on the
-    reference time series and `predict` to detect anomalies in the target time series.
-    For unsupervised anomaly detection, use `fit_predict` directly on the target time
+    series are not used during the fitting process. Use ``fit`` to fit the model on the
+    reference time series and ``predict`` to detect anomalies in the target time series.
+    For unsupervised anomaly detection, use ``fit_predict`` directly on the target time
     series.
 
@@ -155,14 +155,15 @@ def _get_test_params(cls, parameter_set="default"):
         ----------
         parameter_set : str, default="default"
             Name of the set of test parameters to return, for use in tests. If no
-            special parameters are defined for a value, will return `"default"` set.
+            special parameters are defined for a value, will return ``"default"`` set.
 
         Returns
         -------
         params : dict or list of dict, default={}
             Parameters to create testing instances of the class.
            Each dict are parameters to construct an "interesting" test instance, i.e.,
-            `MyClass(**params)` or `MyClass(**params[i])` creates a valid test instance.
+            ``MyClass(**params)`` or ``MyClass(**params[i])`` creates a valid
+            test instance.
         """
         _check_soft_dependencies(*cls._tags["python_dependencies"])
diff --git a/aeon/anomaly_detection/_stomp.py b/aeon/anomaly_detection/_stomp.py
index af39891149..667d118b42 100644
--- a/aeon/anomaly_detection/_stomp.py
+++ b/aeon/anomaly_detection/_stomp.py
@@ -115,14 +115,15 @@ def _get_test_params(cls, parameter_set="default"):
         ----------
         parameter_set : str, default="default"
             Name of the set of test parameters to return, for use in tests. If no
-            special parameters are defined for a value, will return `"default"` set.
+            special parameters are defined for a value, will return ``"default"`` set.
 
         Returns
         -------
         params : dict or list of dict, default={}
             Parameters to create testing instances of the class.
             Each dict are parameters to construct an "interesting" test instance, i.e.,
-            `MyClass(**params)` or `MyClass(**params[i])` creates a valid test instance.
+            ``MyClass(**params)`` or ``MyClass(**params[i])`` creates a valid
+            test instance.
         """
         return {
             "window_size": 10,
diff --git a/aeon/anomaly_detection/base.py b/aeon/anomaly_detection/base.py
index 2e333cf755..4c3bbe9202 100644
--- a/aeon/anomaly_detection/base.py
+++ b/aeon/anomaly_detection/base.py
@@ -34,13 +34,13 @@ class BaseAnomalyDetector(BaseSeriesEstimator):
 
     Output data format (one of the following):
         Anomaly scores (default):
-            np.ndarray, shape ``(m,)`` of type float. For each point of the input time
-            series, the anomaly score is a float value indicating the degree of
-            anomalousness. The higher the score, the more anomalous the point.
+            np.ndarray, shape ``(m,)`` of type ``float``. For each point of the
+            input time series, the anomaly score is a ``float`` value indicating the
+            degree of anomalousness. The higher the score, the more anomalous the point.
         Binary classification:
-            np.ndarray, shape ``(m,)`` of type bool or int. For each point of the input
-            time series, the output is a boolean or integer value indicating whether the
-            point is anomalous (``True``/``1``) or not (``False``/``0``).
+            np.ndarray, shape ``(m,)`` of type ``bool`` or ``int``. For each point of
+            the input time series, the output is a boolean or integer value indicating
+            whether the point is anomalous (``True``/``1``) or not (``False``/``0``).
 
     Detector learning types:
         Unsupervised (default):
@@ -156,8 +156,9 @@ def predict(self, X, axis=1) -> np.ndarray:
         Returns
         -------
         np.ndarray
-            A boolean, int or float array of length len(X), where each element indicates
-            whether the corresponding subsequence is anomalous or its anomaly score.
+            A boolean, ``int`` or ``float`` array of length ``len(X)``, where each
+            element indicates whether the corresponding subsequence is anomalous or its
+            anomaly score.
         """
         fit_empty = self.get_tag("fit_is_empty")
         if not fit_empty:
@@ -191,8 +192,10 @@ def fit_predict(self, X, y=None, axis=1) -> np.ndarray:
         Returns
         -------
         np.ndarray
-            A boolean, int or float array of length len(X), where each element indicates
-            whether the corresponding subsequence is anomalous or its anomaly score.
+            A boolean, ``int`` or ``float`` array of length ``len(X)``, where each
+            element indicates whether the corresponding subsequence is anomalous or
+            its anomaly score.
+
         """
         if self.get_tag("requires_y"):
             if y is None:
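
The docstrings touched above all describe the same calling pattern: the adapters slice the series into windows of ``window_size`` taken every ``stride`` points and return one anomaly score per input time point. A minimal usage sketch, assuming ``IsolationForest`` is importable from ``aeon.anomaly_detection`` as in the aeon versions these files come from, with purely illustrative data and parameter values:

    import numpy as np

    from aeon.anomaly_detection import IsolationForest  # assumed public export

    rng = np.random.default_rng(seed=7)
    series = rng.normal(size=200)
    series[60:70] += 5.0  # illustrative anomalous region

    # Unsupervised use: fit_predict scores the same series it is fit on.
    ad = IsolationForest(n_estimators=10, window_size=10, stride=1)
    scores = ad.fit_predict(series)  # np.ndarray of shape (200,); higher = more anomalous

    # Semi-supervised use: fit on a reference series, then score a target series.
    reference = rng.normal(size=200)
    ad = IsolationForest(n_estimators=10, window_size=10, stride=1)
    ad.fit(reference)
    target_scores = ad.predict(series)

The same pattern should apply to ``CBLOF``, ``COPOD``, ``LOF`` and ``OneClassSVM``, which share the ``window_size``/``stride`` handling documented in these docstrings.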