#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import logging
import os.path
import pickle
import shutil
import tempfile
import unittest

import numpy as np
from parameterized import parameterized

import apache_beam as beam
from apache_beam.ml.anomaly.base import AnomalyPrediction
from apache_beam.ml.anomaly.base import AnomalyResult
from apache_beam.ml.anomaly.transforms import AnomalyDetection
from apache_beam.ml.anomaly.transforms_test import _keyed_result_is_equal_to
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to

# Protect against environments where the PyOD library is not available.
# pylint: disable=wrong-import-order, wrong-import-position, ungrouped-imports
try:
  from apache_beam.ml.anomaly.detectors.pyod_adapter import PyODFactory
  from pyod.models.iforest import IForest
except ImportError:
  raise unittest.SkipTest('PyOD dependencies are not installed')


class PyODIForestTest(unittest.TestCase):
  def setUp(self) -> None:
    self.tmp_dir = tempfile.mkdtemp()

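    # Train a PyOD IsolationForest on the synthetic training data below and
    # pickle it, so the tests can load it back through PyODFactory as an
    # offline detector.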
    seed = 1234
    model = IForest(random_state=seed)
    model.fit(self.get_train_data())
    self.pickled_model_uri = os.path.join(self.tmp_dir, 'iforest_pickled')

    with open(self.pickled_model_uri, 'wb') as fp:
      pickle.dump(model, fp)

  def tearDown(self) -> None:
    shutil.rmtree(self.tmp_dir)

  def get_train_data(self):
    return [
        np.array([1, 5], dtype="float32"),
        np.array([2, 6], dtype="float32"),
        np.array([3, 4], dtype="float32"),
        np.array([2, 6], dtype="float32"),
        np.array([10, 10], dtype="float32"),  # need an outlier in training data
        np.array([3, 4], dtype="float32"),
        np.array([2, 6], dtype="float32"),
        np.array([2, 6], dtype="float32"),
        np.array([2, 5], dtype="float32"),
    ]

  def get_test_data(self):
    return [
        np.array([2, 6], dtype="float32"),
        np.array([100, 100], dtype="float32"),
    ]

  def get_test_data_with_target(self):
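    # The third column is a "target" label; it is not one of the two features
    # the pickled IForest model was trained on.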
    return [
        np.array([2, 6, 0], dtype="float32"),
        np.array([100, 100, 1], dtype="float32"),
    ]

  @parameterized.expand([True, False])
  def test_scoring_with_matched_features(self, with_target):
    if with_target:
      rows = [beam.Row(a=2, b=6, target=0), beam.Row(a=100, b=100, target=1)]
      field_names = ["a", "b", "target"]
      # The selected features should match the features used for training.
      detector = PyODFactory.create_detector(
          self.pickled_model_uri, features=["a", "b"])
      input_data = self.get_test_data_with_target()
    else:
      rows = [beam.Row(a=2, b=6), beam.Row(a=100, b=100)]
      field_names = ["a", "b"]
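      # Without `features`, the offline detector scores every field of the
      # input rows, which matches the two training features here.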
      detector = PyODFactory.create_detector(self.pickled_model_uri)
      input_data = self.get_test_data()

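    # The expected scores and threshold are deterministic because the IForest
    # model in setUp is trained with a fixed random seed.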
    expected_out = [
        (
            0,
            AnomalyResult(
                example=rows[0],
                predictions=[
                    AnomalyPrediction(
                        model_id='OfflineDetector',
                        score=-0.20316164744828075,
                        label=0,
                        threshold=8.326672684688674e-17,
                        info='',
                        source_predictions=None)
                ])),
        (
            0,
            AnomalyResult(
                example=rows[1],
                predictions=[
                    AnomalyPrediction(
                        model_id='OfflineDetector',
                        score=0.179516865091218,
                        label=1,
                        threshold=8.326672684688674e-17,
                        info='',
                        source_predictions=None)
                ])),
    ]

    options = PipelineOptions([])
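    # Each numpy test vector is converted to a beam.Row with the given field
    # names, keyed with a single common key, and then scored by the detector.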
    with beam.Pipeline(options=options) as p:
      out = (
          p | beam.Create(input_data)
          | beam.Map(lambda x: beam.Row(**dict(zip(field_names, map(int, x)))))
          | beam.WithKeys(0)
          | AnomalyDetection(detector=detector))
      assert_that(out, equal_to(expected_out, _keyed_result_is_equal_to))

  def test_scoring_with_unmatched_features(self):
    # The model is trained with two features (a, b), but the scoring input has
    # one extra feature (target). In this case, we should either drop the
    # extra feature(s) from the scoring input or set `features` when creating
    # the offline detector (see `test_scoring_with_matched_features`).
    detector = PyODFactory.create_detector(self.pickled_model_uri)
    options = PipelineOptions([])
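    # Build the pipeline without a context manager so that p.run() can be
    # called explicitly inside assertRaises below.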
    p = beam.Pipeline(options=options)
    _ = (
        p | beam.Create(self.get_test_data_with_target())
        | beam.Map(
            lambda x: beam.Row(**dict(zip(["a", "b", "target"], map(int, x)))))
        | beam.WithKeys(0)
        | AnomalyDetection(detector=detector))

    # This should raise a ValueError with the message
    # "X has 3 features, but IsolationForest is expecting 2 features as input."
    self.assertRaises(ValueError, p.run)


if __name__ == '__main__':
  logging.getLogger().setLevel(logging.WARNING)
  unittest.main()