Add Scoring Module implementation

This change adds the main logic for the scoring module,
defines entry points for the scoring engine plugins, and provides
a watcher-sync tool that enables Watcher database synchronization
without needing to restart any Watcher service.

Partially-Implements: blueprint scoring-module
Change-Id: If10daae969ec27a7008af5173359992e957dcd5e
This commit is contained in:
Tomasz Kaczynski
2016-08-03 08:11:00 +00:00
parent ab10201c72
commit a1cb142009
18 changed files with 1073 additions and 2 deletions

View File

@@ -0,0 +1,54 @@
# -*- encoding: utf-8 -*-
# Copyright (c) 2016 Intel
#
# Authors: Tomasz Kaczynski <tomasz.kaczynski@intel.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_serialization import jsonutils
from watcher.decision_engine.scoring import dummy_scorer
from watcher.tests import base
class TestDummyScorer(base.TestCase):
    """Unit tests for the DummyScorer scoring engine."""

    def test_metadata(self):
        """The scorer reports its name, description and metainfo keys."""
        scorer = dummy_scorer.DummyScorer(config=None)
        self.assertEqual('dummy_scorer', scorer.get_name())
        # assertIn produces a clearer failure message than
        # assertTrue('x' in y).
        self.assertIn('Dummy', scorer.get_description())

        metainfo = scorer.get_metainfo()
        self.assertIn('feature_columns', metainfo)
        self.assertIn('result_columns', metainfo)
        self.assertIn('workloads', metainfo)

    def test_calculate_score(self):
        """Feature vectors map to the expected workload class (0-3)."""
        scorer = dummy_scorer.DummyScorer(config=None)
        self._assert_result(scorer, 0, '[0, 0, 0, 0, 0, 0, 0, 0, 0]')
        self._assert_result(scorer, 0, '[50, 0, 0, 600, 0, 0, 0, 0, 0]')
        self._assert_result(scorer, 0, '[0, 0, 0, 0, 600, 0, 0, 0, 0]')
        self._assert_result(scorer, 1, '[85, 0, 0, 0, 0, 0, 0, 0, 0]')
        self._assert_result(scorer, 2, '[0, 0, 0, 1100, 1100, 0, 0, 0, 0]')
        self._assert_result(scorer, 3,
                            '[0, 0, 0, 0, 0, 70000000, 70000000, 0, 0]')

    def _assert_result(self, scorer, expected, features):
        """Score *features* (a JSON list string) and check the first
        element of the JSON result equals *expected*."""
        result_str = scorer.calculate_score(features)
        actual_result = jsonutils.loads(result_str)[0]
        self.assertEqual(expected, actual_result)

View File

@@ -0,0 +1,51 @@
# -*- encoding: utf-8 -*-
# Copyright (c) 2016 Intel
#
# Authors: Tomasz Kaczynski <tomasz.kaczynski@intel.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_serialization import jsonutils
from watcher.decision_engine.scoring import dummy_scoring_container
from watcher.tests import base
class TestDummyScoringContainer(base.TestCase):
    """Unit tests for DummyScoringContainer and its bundled scorers."""

    def test_get_scoring_engine_list(self):
        """The container exposes exactly three scorers, in a fixed order."""
        scorers = (dummy_scoring_container.DummyScoringContainer
                   .get_scoring_engine_list())
        self.assertEqual(3, len(scorers))
        self.assertEqual('dummy_min_scorer', scorers[0].get_name())
        self.assertEqual('dummy_max_scorer', scorers[1].get_name())
        self.assertEqual('dummy_avg_scorer', scorers[2].get_name())

    def test_scorers(self):
        """min/max/avg scorers compute their aggregate of the features."""
        scorers = (dummy_scoring_container.DummyScoringContainer
                   .get_scoring_engine_list())
        self._assert_result(scorers[0], 1.1, '[1.1, 2.2, 4, 8]')
        self._assert_result(scorers[1], 8, '[1.1, 2.2, 4, 8]')
        # float(1 + 2 + 4 + 8) / 4 = 15.0 / 4 = 3.75
        self._assert_result(scorers[2], 3.75, '[1, 2, 4, 8]')

    def _assert_result(self, scorer, expected, features):
        """Score *features* (a JSON list string) and check the first
        element of the JSON result equals *expected*."""
        result_str = scorer.calculate_score(features)
        actual_result = jsonutils.loads(result_str)[0]
        self.assertEqual(expected, actual_result)

View File

@@ -0,0 +1,53 @@
# -*- encoding: utf-8 -*-
# Copyright (c) 2016 Intel
#
# Authors: Tomasz Kaczynski <tomasz.kaczynski@intel.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from watcher.decision_engine.scoring import scoring_factory
from watcher.tests import base
class TestScoringFactory(base.TestCase):
    """Unit tests for the scoring_factory lookup helpers."""

    def test_get_scoring_engine(self):
        """Each known engine name resolves to a scorer with that name;
        an unknown name raises KeyError."""
        scorer = scoring_factory.get_scoring_engine('dummy_scorer')
        self.assertEqual('dummy_scorer', scorer.get_name())

        scorer = scoring_factory.get_scoring_engine('dummy_min_scorer')
        self.assertEqual('dummy_min_scorer', scorer.get_name())

        scorer = scoring_factory.get_scoring_engine('dummy_max_scorer')
        self.assertEqual('dummy_max_scorer', scorer.get_name())

        scorer = scoring_factory.get_scoring_engine('dummy_avg_scorer')
        self.assertEqual('dummy_avg_scorer', scorer.get_name())

        self.assertRaises(
            KeyError,
            scoring_factory.get_scoring_engine,
            'non_existing_scorer')

    def test_get_scoring_engine_list(self):
        """Every engine returned by the factory is one of the dummies."""
        scoring_engines = scoring_factory.get_scoring_engine_list()

        engine_names = {'dummy_scorer', 'dummy_min_scorer',
                        'dummy_max_scorer', 'dummy_avg_scorer'}
        for scorer in scoring_engines:
            self.assertIn(scorer.get_name(), engine_names)

View File

@@ -0,0 +1,61 @@
# -*- encoding: utf-8 -*-
# Copyright (c) 2015 b<>com
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from watcher.applier.loading import default
from watcher.common import utils
from watcher.decision_engine.model import model_root
from watcher.decision_engine.strategy import strategies
from watcher.tests import base
from watcher.tests.decision_engine.strategy.strategies import \
faker_cluster_state
class TestDummyWithScorer(base.TestCase):
    """Unit tests for the DummyWithScorer strategy."""

    def setUp(self):
        super(TestDummyWithScorer, self).setUp()

        # Fake cluster used to generate scenarios in individual tests.
        self.fake_cluster = faker_cluster_state.FakerModelCollector()

        # Patch the strategy's compute_model property so each test can
        # inject the cluster model it needs; undo the patch on teardown.
        model_patcher = mock.patch.object(
            strategies.DummyWithScorer, "compute_model",
            new_callable=mock.PropertyMock)
        self.m_model = model_patcher.start()
        self.addCleanup(model_patcher.stop)
        self.m_model.return_value = model_root.ModelRoot()

        self.strategy = strategies.DummyWithScorer(config=mock.Mock())

    def test_dummy_with_scorer(self):
        """Executing the strategy yields a solution with four actions."""
        strategy = strategies.DummyWithScorer(config=mock.Mock())
        strategy.input_parameters = utils.Struct()
        strategy.input_parameters.update({'param1': 4.0, 'param2': 'Hi'})
        solution = strategy.execute()
        self.assertEqual(4, len(solution.actions))

    def test_check_parameters(self):
        """Every action produced on a 2-node scenario has parameters
        that validate against its loaded action plugin."""
        self.m_model.return_value = (
            self.fake_cluster.generate_scenario_3_with_2_nodes())
        self.strategy.input_parameters = utils.Struct()
        self.strategy.input_parameters.update({'param1': 4.0, 'param2': 'Hi'})
        solution = self.strategy.execute()

        loader = default.DefaultActionLoader()
        for action in solution.actions:
            loaded_action = loader.load(action['action_type'])
            loaded_action.input_parameters = action['input_parameters']
            loaded_action.validate_parameters()