Browse Source

Improve the reported test coverage rate by marking untestable branches and helper/test scaffolding with `# pragma: no cover`.

Former-commit-id: 7577c066df [formerly ef07c32f57] [formerly c31ec94801 [formerly 8efdb37e72]] [formerly 2f296e3a3a [formerly 2ba9fd9b57] [formerly 1107c80cd2 [formerly eae32b8c35]]] [formerly 4bf6d4e226 [formerly 3e4c28ea84] [formerly ef0a4a2bad [formerly 24ce369846]] [formerly c3ea5c1bf5 [formerly f8aa2fb26c] [formerly 443a27fe8f [formerly 41a1e074c5]]]] [formerly 9b7a1c3c96 [formerly 8880248ba7] [formerly 9cf43e3dbd [formerly 71c0fedeb7]] [formerly 7825d2ed4c [formerly b6ca2f6bfc] [formerly 8220333a41 [formerly 783fd3f20d]]] [formerly 6cf5b2d04c [formerly 2194355f3f] [formerly 804c1e1563 [formerly c62b2de017]] [formerly 5a431c0dea [formerly 753022cd11] [formerly ac4738da2a [formerly 8cb4dbe061]]]]] [formerly 2960a16fb9 [formerly 334a8cb46b] [formerly 3e5c0fb950 [formerly 7ea204f5c9]] [formerly 743f536186 [formerly 286f44d475] [formerly 2d50f9affb [formerly b03e910abe]]] [formerly 6324c78b77 [formerly ccaf3d81e8] [formerly bb32a3ac69 [formerly aac5c7e848]] [formerly 2db3b84676 [formerly 603d44c793] [formerly 3c427aaa3c [formerly 9857a406ba]]]] [formerly 49e35de3cf [formerly 564fe012be] [formerly f7abdd73a2 [formerly 5ed6ea6a87]] [formerly e01595a9bb [formerly 1dd55d4ed8] [formerly cf9f90fa25 [formerly 9e51b38d31]]] [formerly e05a9e8671 [formerly eb9abf958b] [formerly 16cd4e864d [formerly fc6ce544ca]] [formerly fc06fb0384 [formerly b04e61f209] [formerly 1a7ab3be37 [formerly 52581e06a6]]]]]]
Former-commit-id: 04f20bc2e4 [formerly a91b257a82] [formerly bafffa3fc5 [formerly 0307421bd2]] [formerly cb696607b6 [formerly 620a6b9a93] [formerly f1dbf2c0c0 [formerly 7da35b5b1f]]] [formerly a9b446dca6 [formerly d590ae8e00] [formerly 38693439fa [formerly 5373b78ffb]] [formerly 0f0840e56e [formerly 29ab021742] [formerly 4d97bc2c07 [formerly 11817c16a8]]]] [formerly f5fc2df358 [formerly e049d045fb] [formerly 8424f1897b [formerly 9f0ca96d4d]] [formerly 5b86216b0a [formerly 31201a0804] [formerly 6b5274965d [formerly fc5c9d6d3a]]] [formerly aa4f6f4784 [formerly 45947e2455] [formerly 70f0fc7829 [formerly c13d43d327]] [formerly cf4492ba56 [formerly 8b16ec84f7] [formerly 1a7ab3be37]]]]
Former-commit-id: 8f9a398c8a [formerly 34d53ed8f8] [formerly 19a8abe834 [formerly eb7da94480]] [formerly bd1021a232 [formerly c6bd2f7aca] [formerly aaaf3dfd85 [formerly dca9da38b2]]] [formerly 0191ffdbc5 [formerly 41cbca5756] [formerly 192603e34b [formerly a280b1afe2]] [formerly f9cd6e03ec [formerly 2400325e32] [formerly 66082a5322 [formerly 5f025ac39c]]]]
Former-commit-id: 680a1408b5 [formerly fa0a69db2f] [formerly e81b210c5f [formerly 34b612edc3]] [formerly 75705f9417 [formerly 75a1daea03] [formerly f53cda1427 [formerly 9251c1142a]]]
Former-commit-id: 953692cd22 [formerly 07bf5237e2] [formerly 9363895eee [formerly 5c0b104f99]]
Former-commit-id: ebbc10dbb7 [formerly 23dea63cb9]
Former-commit-id: 589b3da70d
master
hegsns 4 years ago
parent
commit
26d0cbe3d4
14 changed files with 160 additions and 159 deletions
  1. +20
    -19
      tods/detection_algorithm/UODBasePrimitive.py
  2. +1
    -1
      tods/detection_algorithm/core/AutoRegOD.py
  3. +14
    -14
      tods/detection_algorithm/core/CollectiveBase.py
  4. +4
    -4
      tods/detection_algorithm/core/CollectiveCommonTest.py
  5. +1
    -1
      tods/detection_algorithm/core/KDiscord.py
  6. +2
    -2
      tods/detection_algorithm/core/MultiAutoRegOD.py
  7. +1
    -1
      tods/detection_algorithm/core/PCA.py
  8. +4
    -4
      tods/detection_algorithm/core/UODCommonTest.py
  9. +55
    -55
      tods/detection_algorithm/core/test_CollectiveBase.py
  10. +12
    -12
      tods/feature_analysis/WaveletTransform.py
  11. +9
    -9
      tods/timeseries_processing/SKAxiswiseScaler.py
  12. +12
    -12
      tods/timeseries_processing/SKPowerTransformer.py
  13. +13
    -13
      tods/timeseries_processing/SKQuantileTransformer.py
  14. +12
    -12
      tods/timeseries_processing/SKStandardScaler.py

+ 20
- 19
tods/detection_algorithm/UODBasePrimitive.py View File

@@ -245,26 +245,26 @@ class UnsupervisedOutlierDetectorBase(UnsupervisedLearnerPrimitiveBase[Inputs, O
"""
# print('Fit:', self._clf)

if self._fitted:
if self._fitted: # pragma: no cover
return CallResult(None)

self._training_inputs, self._training_indices = self._get_columns_to_fit(self._inputs, self.hyperparams)
self._input_column_names = self._training_inputs.columns

if self._training_inputs is None:
if self._training_inputs is None: # pragma: no cover
return CallResult(None)

if len(self._training_indices) > 0:

#print('Fit: ', self._clf)
#print('Fit: ', self._training_inputs.values.shape)
#print('Fit: ', self._clf.fit(self._training_inputs.values))
# print('Fit: ', self._clf)
# print('Fit: ', self._training_inputs.values.shape)
# print('Fit: ', self._clf.fit(self._training_inputs.values))

self._clf.fit(X=self._training_inputs.values, **self._clf_fit_parameter)
self._fitted = True
self._set_subseq_inds()

else:
else: # pragma: no cover
if self.hyperparams['error_on_no_input']:
raise RuntimeError("No input columns were selected")
self.logger.warn("No input columns were selected")
@@ -282,7 +282,7 @@ class UnsupervisedOutlierDetectorBase(UnsupervisedLearnerPrimitiveBase[Inputs, O
1 marks Outliers, 0 marks normal.
"""

if not self._fitted:
if not self._fitted: # pragma: no cover
raise PrimitiveNotFittedError("Primitive not fitted.")
sk_inputs = inputs
if self.hyperparams['use_semantic_types']:
@@ -314,15 +314,16 @@ class UnsupervisedOutlierDetectorBase(UnsupervisedLearnerPrimitiveBase[Inputs, O

else:
sk_output, _, _ = self._clf.predict(sk_inputs.values)

# print(sk_output)
if sparse.issparse(sk_output):
if sparse.issparse(sk_output): # pragma: no cover
sk_output = sk_output.toarray()

outputs = self._wrap_predictions(inputs, sk_output)
if len(outputs.columns) == len(self._input_column_names):
outputs.columns = self._input_column_names
output_columns = [outputs]
else:
else: # pragma: no cover
if self.hyperparams['error_on_no_input']:
raise RuntimeError("No input columns were selected")
self.logger.warn("No input columns were selected")
@@ -345,7 +346,7 @@ class UnsupervisedOutlierDetectorBase(UnsupervisedLearnerPrimitiveBase[Inputs, O
1 marks Outliers, 0 marks normal.
"""

if not self._fitted:
if not self._fitted: # pragma: no cover
raise PrimitiveNotFittedError("Primitive not fitted.")
sk_inputs = inputs
if self.hyperparams['use_semantic_types']:
@@ -377,13 +378,13 @@ class UnsupervisedOutlierDetectorBase(UnsupervisedLearnerPrimitiveBase[Inputs, O
else:
sk_output, _, _ = self._clf.decision_function(sk_inputs.values)

if sparse.issparse(sk_output):
if sparse.issparse(sk_output): # pragma: no cover
sk_output = sk_output.toarray()
outputs = self._wrap_predictions(inputs, sk_output)
if len(outputs.columns) == len(self._input_column_names):
outputs.columns = self._input_column_names
output_columns = [outputs]
else:
else: # pragma: no cover
if self.hyperparams['error_on_no_input']:
raise RuntimeError("No input columns were selected")
self.logger.warn("No input columns were selected")
@@ -477,7 +478,7 @@ class UnsupervisedOutlierDetectorBase(UnsupervisedLearnerPrimitiveBase[Inputs, O
self._fitted = True

@classmethod
def _get_columns_to_fit(cls, inputs: Inputs, hyperparams: Hyperparams):
def _get_columns_to_fit(cls, inputs: Inputs, hyperparams: Hyperparams): # pragma: no cover
"""
Select columns to fit.
Args:
@@ -508,7 +509,7 @@ class UnsupervisedOutlierDetectorBase(UnsupervisedLearnerPrimitiveBase[Inputs, O

@classmethod
def _can_produce_column(cls, inputs_metadata: metadata_base.DataMetadata, column_index: int,
hyperparams: Hyperparams) -> bool:
hyperparams: Hyperparams) -> bool: # pragma: no cover
"""
Output whether a column can be processed.
Args:
@@ -540,7 +541,7 @@ class UnsupervisedOutlierDetectorBase(UnsupervisedLearnerPrimitiveBase[Inputs, O


@classmethod
def _get_target_columns_metadata(cls, outputs_metadata: metadata_base.DataMetadata, hyperparams) -> List[OrderedDict]:
def _get_target_columns_metadata(cls, outputs_metadata: metadata_base.DataMetadata, hyperparams) -> List[OrderedDict]: # pragma: no cover
"""
Output metadata of selected columns.
Args:
@@ -572,7 +573,7 @@ class UnsupervisedOutlierDetectorBase(UnsupervisedLearnerPrimitiveBase[Inputs, O

@classmethod
def _update_predictions_metadata(cls, inputs_metadata: metadata_base.DataMetadata, outputs: Optional[Outputs],
target_columns_metadata: List[OrderedDict]) -> metadata_base.DataMetadata:
target_columns_metadata: List[OrderedDict]) -> metadata_base.DataMetadata: # pragma: no cover
"""
Updata metadata for selected columns.
Args:
@@ -592,7 +593,7 @@ class UnsupervisedOutlierDetectorBase(UnsupervisedLearnerPrimitiveBase[Inputs, O
return outputs_metadata


def _wrap_predictions(self, inputs: Inputs, predictions: ndarray) -> Outputs:
def _wrap_predictions(self, inputs: Inputs, predictions: ndarray) -> Outputs: # pragma: no cover
"""
Wrap predictions into dataframe
Args:
@@ -612,7 +613,7 @@ class UnsupervisedOutlierDetectorBase(UnsupervisedLearnerPrimitiveBase[Inputs, O
return outputs

@classmethod
def _add_target_columns_metadata(cls, outputs_metadata: metadata_base.DataMetadata, hyperparams, primitiveNo):
def _add_target_columns_metadata(cls, outputs_metadata: metadata_base.DataMetadata, hyperparams, primitiveNo): # pragma: no cover
"""
Add target columns metadata
Args:
@@ -638,7 +639,7 @@ class UnsupervisedOutlierDetectorBase(UnsupervisedLearnerPrimitiveBase[Inputs, O

@classmethod
def _copy_inputs_metadata(cls, inputs_metadata: metadata_base.DataMetadata, input_indices: List[int],
outputs_metadata: metadata_base.DataMetadata, hyperparams):
outputs_metadata: metadata_base.DataMetadata, hyperparams): # pragma: no cover
"""
Updata metadata for selected columns.
Args:


+ 1
- 1
tods/detection_algorithm/core/AutoRegOD.py View File

@@ -148,7 +148,7 @@ class AutoRegOD(CollectiveBaseDetector):
return pred_score, X_left_inds.ravel(), X_right_inds.ravel()


if __name__ == "__main__":
if __name__ == "__main__": # pragma: no cover
X_train = np.asarray(
[3., 4., 8., 16, 18, 13., 22., 36., 59., 128, 62, 67, 78,
100]).reshape(-1, 1)


+ 14
- 14
tods/detection_algorithm/core/CollectiveBase.py View File

@@ -22,7 +22,7 @@ from sklearn.utils.validation import check_is_fitted
from sklearn.utils.multiclass import check_classification_targets


def _pprint(params, offset=0, printer=repr):
def _pprint(params, offset=0, printer=repr): # pragma: no cover
# noinspection PyPep8
"""Pretty print the dictionary 'params'

@@ -114,7 +114,7 @@ class CollectiveBaseDetector(metaclass=ABCMeta):
@abc.abstractmethod
def __init__(self, contamination=0.1,
window_size=1,
step_size=1):
step_size=1): # pragma: no cover

if not (0. < contamination <= 0.5):
raise ValueError("contamination must be in (0, 0.5], "
@@ -129,7 +129,7 @@ class CollectiveBaseDetector(metaclass=ABCMeta):

# noinspection PyIncorrectDocstring
@abc.abstractmethod
def fit(self, X, y=None):
def fit(self, X, y=None): # pragma: no cover
"""Fit detector. y is ignored in unsupervised methods.

Parameters
@@ -148,7 +148,7 @@ class CollectiveBaseDetector(metaclass=ABCMeta):
pass

@abc.abstractmethod
def decision_function(self, X):
def decision_function(self, X): # pragma: no cover
"""Predict raw anomaly scores of X using the fitted detector.

The anomaly score of an input sample is computed based on the fitted
@@ -169,7 +169,7 @@ class CollectiveBaseDetector(metaclass=ABCMeta):
pass

@deprecated()
def fit_predict(self, X, y=None):
def fit_predict(self, X, y=None): # pragma: no cover
"""Fit detector first and then predict whether a particular sample
is an outlier or not. y is ignored in unsupervised models.

@@ -197,7 +197,7 @@ class CollectiveBaseDetector(metaclass=ABCMeta):
self.fit(X, y)
return self.labels_

def predict(self, X):
def predict(self, X): # pragma: no cover
"""Predict if a particular sample is an outlier or not.

Parameters
@@ -220,7 +220,7 @@ class CollectiveBaseDetector(metaclass=ABCMeta):
return (pred_score > self.threshold_).astype(
'int').ravel(), X_left_inds.ravel(), X_right_inds.ravel()

def predict_proba(self, X, method='linear'):
def predict_proba(self, X, method='linear'): # pragma: no cover
"""Predict the probability of a sample being outlier. Two approaches
are possible:

@@ -272,7 +272,7 @@ class CollectiveBaseDetector(metaclass=ABCMeta):
raise ValueError(method,
'is not a valid probability conversion method')

def _predict_rank(self, X, normalized=False):
def _predict_rank(self, X, normalized=False): # pragma: no cover
"""Predict the outlyingness rank of a sample by a fitted model. The
method is for outlier detector score combination.

@@ -304,7 +304,7 @@ class CollectiveBaseDetector(metaclass=ABCMeta):
ranks = ranks / ranks.max()
return ranks

def _set_n_classes(self, y):
def _set_n_classes(self, y): # pragma: no cover
"""Set the number of classes if `y` is presented, which is not
expected. It could be useful for multi-class outlier detection.

@@ -326,7 +326,7 @@ class CollectiveBaseDetector(metaclass=ABCMeta):
"y should not be presented in unsupervised learning.")
return self

def _process_decision_scores(self):
def _process_decision_scores(self): # pragma: no cover
"""Internal function to calculate key attributes:

- threshold_: used to decide the binary label
@@ -350,7 +350,7 @@ class CollectiveBaseDetector(metaclass=ABCMeta):
return self

# noinspection PyMethodParameters
def _get_param_names(cls):
def _get_param_names(cls): # pragma: no cover
# noinspection PyPep8
"""Get parameter names for the estimator

@@ -383,7 +383,7 @@ class CollectiveBaseDetector(metaclass=ABCMeta):
return sorted([p.name for p in parameters])

# noinspection PyPep8
def get_params(self, deep=True):
def get_params(self, deep=True): # pragma: no cover
"""Get parameters for this estimator.

See http://scikit-learn.org/stable/modules/generated/sklearn.base.BaseEstimator.html
@@ -424,7 +424,7 @@ class CollectiveBaseDetector(metaclass=ABCMeta):
out[key] = value
return out

def set_params(self, **params):
def set_params(self, **params): # pragma: no cover
# noinspection PyPep8
"""Set the parameters of this estimator.
The method works on simple estimators as well as on nested objects
@@ -464,7 +464,7 @@ class CollectiveBaseDetector(metaclass=ABCMeta):

return self

def __repr__(self):
def __repr__(self): # pragma: no cover
# noinspection PyPep8
"""
See http://scikit-learn.org/stable/modules/generated/sklearn.base.BaseEstimator.html


+ 4
- 4
tods/detection_algorithm/core/CollectiveCommonTest.py View File

@@ -133,11 +133,11 @@ class CollectiveCommonTest:
with assert_raises(ValueError):
self.clf.predict_proba(self.X_test, method='something')

def test_fit_predict(self):
def test_fit_predict(self): # pragma: no cover
pred_labels, _, _ = self.clf.fit_predict(X=self.X_train)
assert_equal(pred_labels.shape, self.y_train.shape)

def test_fit_predict_score(self):
def test_fit_predict_score(self): # pragma: no cover
self.clf.fit_predict_score(self.X_test, self.y_test)
self.clf.fit_predict_score(self.X_test, self.y_test,
scoring='roc_auc_score')
@@ -147,7 +147,7 @@ class CollectiveCommonTest:
self.clf.fit_predict_score(self.X_test, self.y_test,
scoring='something')

def test_predict_rank(self):
def test_predict_rank(self): # pragma: no cover
pred_socres, _, _ = self.clf.decision_function(self.X_test)
pred_ranks = self.clf._predict_rank(self.X_test)

@@ -156,7 +156,7 @@ class CollectiveCommonTest:
assert_array_less(pred_ranks, self.X_train.shape[0] + 1)
assert_array_less(-0.1, pred_ranks)

def test_predict_rank_normalized(self):
def test_predict_rank_normalized(self): # pragma: no cover
pred_socres, _, _ = self.clf.decision_function(self.X_test)
pred_ranks = self.clf._predict_rank(self.X_test, normalized=True)



+ 1
- 1
tods/detection_algorithm/core/KDiscord.py View File

@@ -231,7 +231,7 @@ class KDiscord(CollectiveBaseDetector):
X_left_inds.ravel(), X_right_inds.ravel()


if __name__ == "__main__":
if __name__ == "__main__": # pragma: no cover
X_train = np.asarray(
[3., 4., 8., 16, 18, 13., 22., 36., 59., 128, 62, 67, 78,
100]).reshape(-1, 1)


+ 2
- 2
tods/detection_algorithm/core/MultiAutoRegOD.py View File

@@ -100,7 +100,7 @@ class MultiAutoRegOD(CollectiveBaseDetector):

return models

def _score_combination(self, scores):
def _score_combination(self, scores): # pragma: no cover
"""Internal function for combining univarite scores.
"""

@@ -197,7 +197,7 @@ class MultiAutoRegOD(CollectiveBaseDetector):
return decision_scores, X_left_inds, X_right_inds


if __name__ == "__main__":
if __name__ == "__main__": # pragma: no cover
X_train = np.asarray(
[[3., 5], [5., 9], [7., 2], [42., 20], [8., 12], [10., 12], [12., 12],
[18., 16], [20., 7], [18., 10], [23., 12], [22., 15]])


+ 1
- 1
tods/detection_algorithm/core/PCA.py View File

@@ -230,7 +230,7 @@ class PCA(CollectiveBaseDetector):
sub_matrices), X_left_inds.ravel(), X_right_inds.ravel()


if __name__ == "__main__":
if __name__ == "__main__": # pragma: no cover
# X_train = np.asarray(
# [3., 4., 8., 16, 18, 13., 22., 36., 59., 128, 62, 67, 78, 100]).reshape(-1, 1)



+ 4
- 4
tods/detection_algorithm/core/UODCommonTest.py View File

@@ -112,11 +112,11 @@ class UODCommonTest:
with assert_raises(ValueError):
self.clf.predict_proba(self.X_test, method='something')

def test_fit_predict(self):
def test_fit_predict(self): # pragma: no cover
pred_labels = self.clf.fit_predict(X=self.X_train)
assert_equal(pred_labels.shape, self.y_train.shape)

def test_fit_predict_score(self):
def test_fit_predict_score(self): # pragma: no cover
self.clf.fit_predict_score(self.X_test, self.y_test)
self.clf.fit_predict_score(self.X_test, self.y_test,
scoring='roc_auc_score')
@@ -126,7 +126,7 @@ class UODCommonTest:
self.clf.fit_predict_score(self.X_test, self.y_test,
scoring='something')

def test_predict_rank(self):
def test_predict_rank(self): # pragma: no cover
pred_socres = self.clf.decision_function(self.X_test)
pred_ranks = self.clf._predict_rank(self.X_test)

@@ -135,7 +135,7 @@ class UODCommonTest:
assert_array_less(pred_ranks, self.X_train.shape[0] + 1)
assert_array_less(-0.1, pred_ranks)

def test_predict_rank_normalized(self):
def test_predict_rank_normalized(self): # pragma: no cover
pred_socres = self.clf.decision_function(self.X_test)
pred_ranks = self.clf._predict_rank(self.X_test, normalized=True)



+ 55
- 55
tods/detection_algorithm/core/test_CollectiveBase.py View File

@@ -1,132 +1,132 @@
# -*- coding: utf-8 -*-
from __future__ import division
from __future__ import print_function
from __future__ import division # pragma: no cover
from __future__ import print_function # pragma: no cover

import os
import sys
import os # pragma: no cover
import sys # pragma: no cover

import unittest
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_raises
import unittest # pragma: no cover
from sklearn.utils.testing import assert_equal # pragma: no cover
from sklearn.utils.testing import assert_raises # pragma: no cover

import numpy as np
import numpy as np # pragma: no cover

# temporary solution for relative imports in case pyod is not installed
# if pyod is installed, no need to use the following line
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))) # pragma: no cover

from detection_algorithm.core.CollectiveBase import CollectiveBaseDetector
from pyod.utils.data import generate_data
from detection_algorithm.core.CollectiveBase import CollectiveBaseDetector # pragma: no cover
from pyod.utils.data import generate_data # pragma: no cover


# Check sklearn\tests\test_base
# A few test classes
# noinspection PyMissingConstructor,PyPep8Naming
class MyEstimator(CollectiveBaseDetector):
class MyEstimator(CollectiveBaseDetector): # pragma: no cover

def __init__(self, l1=0, empty=None):
def __init__(self, l1=0, empty=None): # pragma: no cover
self.l1 = l1
self.empty = empty

def fit(self, X, y=None):
def fit(self, X, y=None): # pragma: no cover
pass

def decision_function(self, X):
def decision_function(self, X): # pragma: no cover
pass


# noinspection PyMissingConstructor
class K(CollectiveBaseDetector):
def __init__(self, c=None, d=None):
class K(CollectiveBaseDetector): # pragma: no cover
def __init__(self, c=None, d=None): # pragma: no cover
self.c = c
self.d = d

def fit(self, X, y=None):
def fit(self, X, y=None): # pragma: no cover
pass

def decision_function(self, X):
def decision_function(self, X): # pragma: no cover
pass


# noinspection PyMissingConstructor
class T(CollectiveBaseDetector):
def __init__(self, a=None, b=None):
class T(CollectiveBaseDetector): # pragma: no cover
def __init__(self, a=None, b=None): # pragma: no cover
self.a = a
self.b = b

def fit(self, X, y=None):
def fit(self, X, y=None): # pragma: no cover
pass

def decision_function(self, X):
def decision_function(self, X): # pragma: no cover
pass


# noinspection PyMissingConstructor
class ModifyInitParams(CollectiveBaseDetector):
class ModifyInitParams(CollectiveBaseDetector): # pragma: no cover
"""Deprecated behavior.
Equal parameters but with a type cast.
Doesn't fulfill a is a
"""

def __init__(self, a=np.array([0])):
def __init__(self, a=np.array([0])): # pragma: no cover
self.a = a.copy()

def fit(self, X, y=None):
def fit(self, X, y=None): # pragma: no cover
pass

def decision_function(self, X):
def decision_function(self, X): # pragma: no cover
pass


# noinspection PyMissingConstructor
class VargEstimator(CollectiveBaseDetector):
class VargEstimator(CollectiveBaseDetector): # pragma: no cover
"""scikit-learn estimators shouldn't have vargs."""

def __init__(self, *vargs):
def __init__(self, *vargs): # pragma: no cover
pass

def fit(self, X, y=None):
def fit(self, X, y=None): # pragma: no cover
pass

def decision_function(self, X):
def decision_function(self, X): # pragma: no cover
pass


class Dummy1(CollectiveBaseDetector):
def __init__(self, contamination=0.1):
class Dummy1(CollectiveBaseDetector): # pragma: no cover
def __init__(self, contamination=0.1): # pragma: no cover
super(Dummy1, self).__init__(contamination=contamination)

def decision_function(self, X):
def decision_function(self, X): # pragma: no cover
pass

def fit(self, X, y=None):
def fit(self, X, y=None): # pragma: no cover
pass


class Dummy2(CollectiveBaseDetector):
def __init__(self, contamination=0.1):
class Dummy2(CollectiveBaseDetector): # pragma: no cover
def __init__(self, contamination=0.1): # pragma: no cover
super(Dummy2, self).__init__(contamination=contamination)

def decision_function(self, X):
def decision_function(self, X): # pragma: no cover
pass

def fit(self, X, y=None):
def fit(self, X, y=None): # pragma: no cover
return X


class Dummy3(CollectiveBaseDetector):
def __init__(self, contamination=0.1):
class Dummy3(CollectiveBaseDetector): # pragma: no cover
def __init__(self, contamination=0.1): # pragma: no cover
super(Dummy3, self).__init__(contamination=contamination)

def decision_function(self, X):
def decision_function(self, X): # pragma: no cover
pass

def fit(self, X, y=None):
def fit(self, X, y=None): # pragma: no cover
self.labels_ = X


class TestBASE(unittest.TestCase):
def setUp(self):
class TestBASE(unittest.TestCase): # pragma: no cover
def setUp(self): # pragma: no cover
self.n_train = 100
self.n_test = 50
self.contamination = 0.1
@@ -135,7 +135,7 @@ class TestBASE(unittest.TestCase):
n_train=self.n_train, n_test=self.n_test,
contamination=self.contamination)

def test_init(self):
def test_init(self): # pragma: no cover
"""
Test base class initialization

@@ -156,26 +156,26 @@ class TestBASE(unittest.TestCase):
with assert_raises(ValueError):
Dummy1(contamination=-0.5)

def test_fit(self):
def test_fit(self): # pragma: no cover
self.dummy_clf = Dummy2()
assert_equal(self.dummy_clf.fit(0), 0)

def test_fit_predict(self):
def test_fit_predict(self): # pragma: no cover
# TODO: add more testcases

self.dummy_clf = Dummy3()

assert_equal(self.dummy_clf.fit_predict(0), 0)

def test_predict_proba(self):
def test_predict_proba(self): # pragma: no cover
# TODO: create uniform testcases
pass

def test_rank(self):
def test_rank(self): # pragma: no cover
# TODO: create uniform testcases
pass

def test_repr(self):
def test_repr(self): # pragma: no cover
# Smoke test the repr of the base estimator.
my_estimator = MyEstimator()
repr(my_estimator)
@@ -188,12 +188,12 @@ class TestBASE(unittest.TestCase):
some_est = T(a=["long_params"] * 1000)
assert_equal(len(repr(some_est)), 415)

def test_str(self):
def test_str(self): # pragma: no cover
# Smoke test the str of the base estimator
my_estimator = MyEstimator()
str(my_estimator)

def test_get_params(self):
def test_get_params(self): # pragma: no cover
test = T(K(), K())

assert ('a__d' in test.get_params(deep=True))
@@ -203,9 +203,9 @@ class TestBASE(unittest.TestCase):
assert (test.a.d == 2)
assert_raises(ValueError, test.set_params, a__a=2)

def tearDown(self):
def tearDown(self): # pragma: no cover
pass


if __name__ == '__main__':
if __name__ == '__main__': # pragma: no cover
unittest.main()

+ 12
- 12
tods/feature_analysis/WaveletTransform.py View File

@@ -221,18 +221,18 @@ class WaveletTransformer(transformer.TransformerPrimitiveBase[Inputs, Outputs, H


sk_inputs = inputs
if self.hyperparams['use_semantic_types']:
if self.hyperparams['use_semantic_types']: # pragma: no cover
sk_inputs = inputs.iloc[:, self._columns_to_produce]
output_columns = []
if len(self._columns_to_produce) > 0:
sk_output = self._clf.produce(sk_inputs, self.hyperparams['inverse'])
if sparse.issparse(sk_output):
if sparse.issparse(sk_output): # pragma: no cover
sk_output = sk_output.toarray()
outputs = self._wrap_predictions(inputs, sk_output)
if len(outputs.columns) == len(self._input_column_names):
outputs.columns = self._input_column_names
output_columns = [outputs]
else:
else: # pragma: no cover
if self.hyperparams['error_on_no_input']:
raise RuntimeError("No input columns were selected")
self.logger.warn("No input columns were selected")
@@ -253,7 +253,7 @@ class WaveletTransformer(transformer.TransformerPrimitiveBase[Inputs, Outputs, H
# return base.CallResult(dataframe)

@classmethod
def _get_columns_to_fit(cls, inputs: Inputs, hyperparams: Hyperparams):
def _get_columns_to_fit(cls, inputs: Inputs, hyperparams: Hyperparams): # pragma: no cover
"""
Select columns to fit.
Args:
@@ -285,7 +285,7 @@ class WaveletTransformer(transformer.TransformerPrimitiveBase[Inputs, Outputs, H

@classmethod
def _can_produce_column(cls, inputs_metadata: metadata_base.DataMetadata, column_index: int,
hyperparams: Hyperparams) -> bool:
hyperparams: Hyperparams) -> bool: # pragma: no cover
"""
Output whether a column can be processed.
Args:
@@ -324,7 +324,7 @@ class WaveletTransformer(transformer.TransformerPrimitiveBase[Inputs, Outputs, H

@classmethod
def _get_target_columns_metadata(cls, outputs_metadata: metadata_base.DataMetadata, hyperparams) -> List[
OrderedDict]:
OrderedDict]: # pragma: no cover
"""
Output metadata of selected columns.
Args:
@@ -355,7 +355,7 @@ class WaveletTransformer(transformer.TransformerPrimitiveBase[Inputs, Outputs, H

@classmethod
def _update_predictions_metadata(cls, inputs_metadata: metadata_base.DataMetadata, outputs: Optional[Outputs],
target_columns_metadata: List[OrderedDict]) -> metadata_base.DataMetadata:
target_columns_metadata: List[OrderedDict]) -> metadata_base.DataMetadata: # pragma: no cover
"""
Updata metadata for selected columns.
Args:
@@ -374,7 +374,7 @@ class WaveletTransformer(transformer.TransformerPrimitiveBase[Inputs, Outputs, H

return outputs_metadata

def _wrap_predictions(self, inputs: Inputs, predictions: ndarray) -> Outputs:
def _wrap_predictions(self, inputs: Inputs, predictions: ndarray) -> Outputs: # pragma: no cover
"""
Wrap predictions into dataframe
Args:
@@ -392,7 +392,7 @@ class WaveletTransformer(transformer.TransformerPrimitiveBase[Inputs, Outputs, H

@classmethod
def _copy_inputs_metadata(cls, inputs_metadata: metadata_base.DataMetadata, input_indices: List[int],
outputs_metadata: metadata_base.DataMetadata, hyperparams):
outputs_metadata: metadata_base.DataMetadata, hyperparams): # pragma: no cover
"""
Updata metadata for selected columns.
Args:
@@ -486,7 +486,7 @@ class Wavelet:

return coeffs_buf # coeffs_T

def transform_to_single_dataframe(self, data):
def transform_to_single_dataframe(self, data): # pragma: no cover

# print(data)
data_to_transform = data.squeeze(1)
@@ -515,7 +515,7 @@ class Wavelet:

return coeffs_T

def inverse_transform_to_dataframe(self, coeffs):
def inverse_transform_to_dataframe(self, coeffs): # pragma: no cover
# print('=======inverse_transform======')
# print('level: ', self._level)
# print(coeffs)
@@ -527,7 +527,7 @@ class Wavelet:
# print(data)
return data # [0:-1]

def inverse_transform_to_single_dataframe(self, coeffs):
def inverse_transform_to_single_dataframe(self, coeffs): # pragma: no cover
# print('=======inverse_transform======')
# print('level: ', self._level)
# print(coeffs)


+ 9
- 9
tods/timeseries_processing/SKAxiswiseScaler.py View File

@@ -179,18 +179,18 @@ class SKAxiswiseScaler(transformer.TransformerPrimitiveBase[Inputs, Outputs, Hyp
self._input_column_names = inputs.columns
# print(self._columns_to_produce)
sk_inputs = inputs
if self.hyperparams['use_semantic_types']:
if self.hyperparams['use_semantic_types']: # pragma: no cover
sk_inputs = inputs.iloc[:, self._columns_to_produce]
output_columns = []
if len(self._columns_to_produce) > 0:
sk_output = self._clf.produce(sk_inputs)
if sparse.issparse(sk_output):
if sparse.issparse(sk_output): # pragma: no cover
sk_output = sk_output.toarray()
outputs = self._wrap_predictions(inputs, sk_output)
if len(outputs.columns) == len(self._input_column_names):
outputs.columns = self._input_column_names
output_columns = [outputs]
else:
else: # pragma: no cover
if self.hyperparams['error_on_no_input']:
raise RuntimeError("No input columns were selected")
self.logger.warn("No input columns were selected")
@@ -209,7 +209,7 @@ class SKAxiswiseScaler(transformer.TransformerPrimitiveBase[Inputs, Outputs, Hyp


@classmethod
def _get_columns_to_fit(cls, inputs: Inputs, hyperparams: Hyperparams):
def _get_columns_to_fit(cls, inputs: Inputs, hyperparams: Hyperparams): # pragma: no cover
"""
Select columns to fit.
Args:
@@ -239,7 +239,7 @@ class SKAxiswiseScaler(transformer.TransformerPrimitiveBase[Inputs, Outputs, Hyp

@classmethod
def _can_produce_column(cls, inputs_metadata: metadata_base.DataMetadata, column_index: int,
hyperparams: Hyperparams) -> bool:
hyperparams: Hyperparams) -> bool: # pragma: no cover
"""
Output whether a column can be processed.
Args:
@@ -274,7 +274,7 @@ class SKAxiswiseScaler(transformer.TransformerPrimitiveBase[Inputs, Outputs, Hyp


@classmethod
def _get_target_columns_metadata(cls, outputs_metadata: metadata_base.DataMetadata, hyperparams) -> List[OrderedDict]:
def _get_target_columns_metadata(cls, outputs_metadata: metadata_base.DataMetadata, hyperparams) -> List[OrderedDict]: # pragma: no cover
"""
Output metadata of selected columns.
Args:
@@ -307,7 +307,7 @@ class SKAxiswiseScaler(transformer.TransformerPrimitiveBase[Inputs, Outputs, Hyp

@classmethod
def _update_predictions_metadata(cls, inputs_metadata: metadata_base.DataMetadata, outputs: Optional[Outputs],
target_columns_metadata: List[OrderedDict]) -> metadata_base.DataMetadata:
target_columns_metadata: List[OrderedDict]) -> metadata_base.DataMetadata: # pragma: no cover
"""
Updata metadata for selected columns.
Args:
@@ -328,7 +328,7 @@ class SKAxiswiseScaler(transformer.TransformerPrimitiveBase[Inputs, Outputs, Hyp
return outputs_metadata


def _wrap_predictions(self, inputs: Inputs, predictions: ndarray) -> Outputs:
def _wrap_predictions(self, inputs: Inputs, predictions: ndarray) -> Outputs: # pragma: no cover
"""
Wrap predictions into dataframe
Args:
@@ -350,7 +350,7 @@ class SKAxiswiseScaler(transformer.TransformerPrimitiveBase[Inputs, Outputs, Hyp

@classmethod
def _copy_inputs_metadata(cls, inputs_metadata: metadata_base.DataMetadata, input_indices: List[int],
outputs_metadata: metadata_base.DataMetadata, hyperparams):
outputs_metadata: metadata_base.DataMetadata, hyperparams): # pragma: no cover
"""
Updata metadata for selected columns.
Args:


+ 12
- 12
tods/timeseries_processing/SKPowerTransformer.py View File

@@ -196,19 +196,19 @@ class SKPowerTransformer(UnsupervisedLearnerPrimitiveBase[Inputs, Outputs, Param
None
"""

if self._fitted:
if self._fitted: # pragma: no cover
return CallResult(None)

self._training_inputs, self._training_indices = self._get_columns_to_fit(self._inputs, self.hyperparams)
self._input_column_names = self._training_inputs.columns

if self._training_inputs is None:
if self._training_inputs is None: # pragma: no cover
return CallResult(None)

if len(self._training_indices) > 0:
self._clf.fit_transform(self._training_inputs)
self._fitted = True
else:
else: # pragma: no cover
if self.hyperparams['error_on_no_input']:
raise RuntimeError("No input columns were selected")
self.logger.warn("No input columns were selected")
@@ -229,18 +229,18 @@ class SKPowerTransformer(UnsupervisedLearnerPrimitiveBase[Inputs, Outputs, Param
if not self._fitted:
raise PrimitiveNotFittedError("Primitive not fitted.")
sk_inputs = inputs
if self.hyperparams['use_semantic_types']:
if self.hyperparams['use_semantic_types']: # pragma: no cover
sk_inputs = inputs.iloc[:, self._training_indices]
output_columns = []
if len(self._training_indices) > 0:
sk_output = self._clf.transform(sk_inputs)
if sparse.issparse(sk_output):
if sparse.issparse(sk_output): # pragma: no cover
sk_output = sk_output.toarray()
outputs = self._wrap_predictions(inputs, sk_output)
if len(outputs.columns) == len(self._input_column_names):
outputs.columns = self._input_column_names
output_columns = [outputs]
else:
else: # pragma: no cover
if self.hyperparams['error_on_no_input']:
raise RuntimeError("No input columns were selected")
self.logger.warn("No input columns were selected")
@@ -313,7 +313,7 @@ class SKPowerTransformer(UnsupervisedLearnerPrimitiveBase[Inputs, Outputs, Param


@classmethod
def _get_columns_to_fit(cls, inputs: Inputs, hyperparams: Hyperparams):
def _get_columns_to_fit(cls, inputs: Inputs, hyperparams: Hyperparams): # pragma: no cover
"""
Select columns to fit.
Args:
@@ -343,7 +343,7 @@ class SKPowerTransformer(UnsupervisedLearnerPrimitiveBase[Inputs, Outputs, Param

@classmethod
def _can_produce_column(cls, inputs_metadata: metadata_base.DataMetadata, column_index: int,
hyperparams: Hyperparams) -> bool:
hyperparams: Hyperparams) -> bool: # pragma: no cover
"""
Output whether a column can be processed.
Args:
@@ -376,7 +376,7 @@ class SKPowerTransformer(UnsupervisedLearnerPrimitiveBase[Inputs, Outputs, Param


@classmethod
def _get_target_columns_metadata(cls, outputs_metadata: metadata_base.DataMetadata, hyperparams) -> List[OrderedDict]:
def _get_target_columns_metadata(cls, outputs_metadata: metadata_base.DataMetadata, hyperparams) -> List[OrderedDict]: # pragma: no cover
"""
Output metadata of selected columns.
Args:
@@ -409,7 +409,7 @@ class SKPowerTransformer(UnsupervisedLearnerPrimitiveBase[Inputs, Outputs, Param

@classmethod
def _update_predictions_metadata(cls, inputs_metadata: metadata_base.DataMetadata, outputs: Optional[Outputs],
target_columns_metadata: List[OrderedDict]) -> metadata_base.DataMetadata:
target_columns_metadata: List[OrderedDict]) -> metadata_base.DataMetadata: # pragma: no cover
"""
Updata metadata for selected columns.
Args:
@@ -431,7 +431,7 @@ class SKPowerTransformer(UnsupervisedLearnerPrimitiveBase[Inputs, Outputs, Param
return outputs_metadata


def _wrap_predictions(self, inputs: Inputs, predictions: ndarray) -> Outputs:
def _wrap_predictions(self, inputs: Inputs, predictions: ndarray) -> Outputs: # pragma: no cover
"""
Wrap predictions into dataframe
Args:
@@ -451,7 +451,7 @@ class SKPowerTransformer(UnsupervisedLearnerPrimitiveBase[Inputs, Outputs, Param

@classmethod
def _copy_inputs_metadata(cls, inputs_metadata: metadata_base.DataMetadata, input_indices: List[int],
outputs_metadata: metadata_base.DataMetadata, hyperparams):
outputs_metadata: metadata_base.DataMetadata, hyperparams): # pragma: no cover
"""
Updata metadata for selected columns.
Args:


+ 13
- 13
tods/timeseries_processing/SKQuantileTransformer.py View File

@@ -214,19 +214,19 @@ class SKQuantileTransformer(UnsupervisedLearnerPrimitiveBase[Inputs, Outputs, Pa
Returns:
None
"""
if self._fitted:
if self._fitted: # pragma: no cover
return CallResult(None)

self._training_inputs, self._training_indices = self._get_columns_to_fit(self._inputs, self.hyperparams)
self._input_column_names = self._training_inputs.columns

if self._training_inputs is None:
if self._training_inputs is None: # pragma: no cover
return CallResult(None)

if len(self._training_indices) > 0:
self._clf.fit(self._training_inputs)
self._fitted = True
else:
else: # pragma: no cover
if self.hyperparams['error_on_no_input']:
raise RuntimeError("No input columns were selected")
self.logger.warn("No input columns were selected")
@@ -241,21 +241,21 @@ class SKQuantileTransformer(UnsupervisedLearnerPrimitiveBase[Inputs, Outputs, Pa
Returns:
Container DataFrame after Quantile Transformation.
"""
if not self._fitted:
if not self._fitted: # pragma: no cover
raise PrimitiveNotFittedError("Primitive not fitted.")
sk_inputs = inputs
if self.hyperparams['use_semantic_types']:
if self.hyperparams['use_semantic_types']: # pragma: no cover
sk_inputs = inputs.iloc[:, self._training_indices]
output_columns = []
if len(self._training_indices) > 0:
sk_output = self._clf.transform(sk_inputs)
if sparse.issparse(sk_output):
if sparse.issparse(sk_output): # pragma: no cover
sk_output = sk_output.toarray()
outputs = self._wrap_predictions(inputs, sk_output)
if len(outputs.columns) == len(self._input_column_names):
outputs.columns = self._input_column_names
output_columns = [outputs]
else:
else: # pragma: no cover
if self.hyperparams['error_on_no_input']:
raise RuntimeError("No input columns were selected")
self.logger.warn("No input columns were selected")
@@ -327,7 +327,7 @@ class SKQuantileTransformer(UnsupervisedLearnerPrimitiveBase[Inputs, Outputs, Pa

@classmethod
def _get_columns_to_fit(cls, inputs: Inputs, hyperparams: Hyperparams):
def _get_columns_to_fit(cls, inputs: Inputs, hyperparams: Hyperparams): # pragma: no cover
"""
Select columns to fit.
Args:
@@ -353,7 +353,7 @@ class SKQuantileTransformer(UnsupervisedLearnerPrimitiveBase[Inputs, Outputs, Pa
# return columns_to_produce

@classmethod
def _can_produce_column(cls, inputs_metadata: metadata_base.DataMetadata, column_index: int, hyperparams: Hyperparams) -> bool:
def _can_produce_column(cls, inputs_metadata: metadata_base.DataMetadata, column_index: int, hyperparams: Hyperparams) -> bool: # pragma: no cover
"""
Output whether a column can be processed.
Args:
@@ -385,7 +385,7 @@ class SKQuantileTransformer(UnsupervisedLearnerPrimitiveBase[Inputs, Outputs, Pa

@classmethod
def _get_target_columns_metadata(cls, outputs_metadata: metadata_base.DataMetadata, hyperparams) -> List[OrderedDict]:
def _get_target_columns_metadata(cls, outputs_metadata: metadata_base.DataMetadata, hyperparams) -> List[OrderedDict]: # pragma: no cover
"""
Output metadata of selected columns.
Args:
@@ -416,7 +416,7 @@ class SKQuantileTransformer(UnsupervisedLearnerPrimitiveBase[Inputs, Outputs, Pa
@classmethod
def _update_predictions_metadata(cls, inputs_metadata: metadata_base.DataMetadata, outputs: Optional[Outputs],
target_columns_metadata: List[OrderedDict]) -> metadata_base.DataMetadata:
target_columns_metadata: List[OrderedDict]) -> metadata_base.DataMetadata: # pragma: no cover
"""
Updata metadata for selected columns.
Args:
@@ -435,7 +435,7 @@ class SKQuantileTransformer(UnsupervisedLearnerPrimitiveBase[Inputs, Outputs, Pa

return outputs_metadata

def _wrap_predictions(self, inputs: Inputs, predictions: ndarray) -> Outputs:
def _wrap_predictions(self, inputs: Inputs, predictions: ndarray) -> Outputs: # pragma: no cover
"""
Wrap predictions into dataframe
Args:
@@ -453,7 +453,7 @@ class SKQuantileTransformer(UnsupervisedLearnerPrimitiveBase[Inputs, Outputs, Pa

@classmethod
def _copy_inputs_metadata(cls, inputs_metadata: metadata_base.DataMetadata, input_indices: List[int],
outputs_metadata: metadata_base.DataMetadata, hyperparams):
outputs_metadata: metadata_base.DataMetadata, hyperparams): # pragma: no cover
"""
Updata metadata for selected columns.
Args:


+ 12
- 12
tods/timeseries_processing/SKStandardScaler.py View File

@@ -209,19 +209,19 @@ class SKStandardScaler(UnsupervisedLearnerPrimitiveBase[Inputs, Outputs, Params,
None
"""

if self._fitted:
if self._fitted: # pragma: no cover
return CallResult(None)

self._training_inputs, self._training_indices = self._get_columns_to_fit(self._inputs, self.hyperparams)
self._input_column_names = self._training_inputs.columns

if self._training_inputs is None:
if self._training_inputs is None: # pragma: no cover
return CallResult(None)

if len(self._training_indices) > 0:
self._clf.fit_transform(self._training_inputs)
self._fitted = True
else:
else: # pragma: no cover
if self.hyperparams['error_on_no_input']:
raise RuntimeError("No input columns were selected")
self.logger.warn("No input columns were selected")
@@ -244,18 +244,18 @@ class SKStandardScaler(UnsupervisedLearnerPrimitiveBase[Inputs, Outputs, Params,
if not self._fitted:
raise PrimitiveNotFittedError("Primitive not fitted.")
sk_inputs = inputs
if self.hyperparams['use_semantic_types']:
if self.hyperparams['use_semantic_types']: # pragma: no cover
sk_inputs = inputs.iloc[:, self._training_indices]
output_columns = []
if len(self._training_indices) > 0:
sk_output = self._clf.transform(sk_inputs)
if sparse.issparse(sk_output):
if sparse.issparse(sk_output): # pragma: no cover
sk_output = sk_output.toarray()
outputs = self._wrap_predictions(inputs, sk_output)
if len(outputs.columns) == len(self._input_column_names):
outputs.columns = self._input_column_names
output_columns = [outputs]
else:
else: # pragma: no cover
if self.hyperparams['error_on_no_input']:
raise RuntimeError("No input columns were selected")
self.logger.warn("No input columns were selected")
@@ -349,7 +349,7 @@ class SKStandardScaler(UnsupervisedLearnerPrimitiveBase[Inputs, Outputs, Params,


@classmethod
def _get_columns_to_fit(cls, inputs: Inputs, hyperparams: Hyperparams):
def _get_columns_to_fit(cls, inputs: Inputs, hyperparams: Hyperparams): # pragma: no cover

"""
Select columns to fit.
@@ -381,7 +381,7 @@ class SKStandardScaler(UnsupervisedLearnerPrimitiveBase[Inputs, Outputs, Params,

@classmethod
def _can_produce_column(cls, inputs_metadata: metadata_base.DataMetadata, column_index: int,
hyperparams: Hyperparams) -> bool:
hyperparams: Hyperparams) -> bool: # pragma: no cover
"""
Output whether a column can be processed.
Args:
@@ -416,7 +416,7 @@ class SKStandardScaler(UnsupervisedLearnerPrimitiveBase[Inputs, Outputs, Params,


@classmethod
def _get_target_columns_metadata(cls, outputs_metadata: metadata_base.DataMetadata, hyperparams) -> List[OrderedDict]:
def _get_target_columns_metadata(cls, outputs_metadata: metadata_base.DataMetadata, hyperparams) -> List[OrderedDict]: # pragma: no cover

"""
Output metadata of selected columns.
@@ -450,7 +450,7 @@ class SKStandardScaler(UnsupervisedLearnerPrimitiveBase[Inputs, Outputs, Params,

@classmethod
def _update_predictions_metadata(cls, inputs_metadata: metadata_base.DataMetadata, outputs: Optional[Outputs],
target_columns_metadata: List[OrderedDict]) -> metadata_base.DataMetadata:
target_columns_metadata: List[OrderedDict]) -> metadata_base.DataMetadata: # pragma: no cover

"""
Updata metadata for selected columns.
@@ -472,7 +472,7 @@ class SKStandardScaler(UnsupervisedLearnerPrimitiveBase[Inputs, Outputs, Params,
return outputs_metadata


def _wrap_predictions(self, inputs: Inputs, predictions: ndarray) -> Outputs:
def _wrap_predictions(self, inputs: Inputs, predictions: ndarray) -> Outputs: # pragma: no cover

"""
Wrap predictions into dataframe
@@ -495,7 +495,7 @@ class SKStandardScaler(UnsupervisedLearnerPrimitiveBase[Inputs, Outputs, Params,

@classmethod
def _copy_inputs_metadata(cls, inputs_metadata: metadata_base.DataMetadata, input_indices: List[int],
outputs_metadata: metadata_base.DataMetadata, hyperparams):
outputs_metadata: metadata_base.DataMetadata, hyperparams): # pragma: no cover

"""
Updata metadata for selected columns.


Loading…
Cancel
Save