Merge "Multi datasource support for Basic Consolidation"

This commit is contained in:
Jenkins
2017-01-16 09:54:24 +00:00
committed by Gerrit Code Review
12 changed files with 472 additions and 90 deletions

View File

@@ -16,12 +16,10 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import random
import oslo_utils
class FakerMetricsCollector(object):
class FakeCeilometerMetrics(object):
def __init__(self):
self.emptytype = ""
@@ -46,19 +44,20 @@ class FakerMetricsCollector(object):
elif meter_name == "hardware.ipmi.node.airflow":
result = self.get_average_airflow(resource_id)
elif meter_name == "hardware.ipmi.node.temperature":
result = self.get_average_inletT(resource_id)
result = self.get_average_inlet_t(resource_id)
elif meter_name == "hardware.ipmi.node.power":
result = self.get_average_power(resource_id)
return result
def mock_get_statistics_wb(self, resource_id, meter_name, period,
aggregate='avg'):
result = 0
result = 0.0
if meter_name == "cpu_util":
result = self.get_average_usage_instance_cpu_wb(resource_id)
return result
def get_average_outlet_temperature(self, uuid):
@staticmethod
def get_average_outlet_temperature(uuid):
"""The average outlet temperature for host"""
mock = {}
mock['Node_0'] = 30
@@ -68,14 +67,15 @@ class FakerMetricsCollector(object):
mock[uuid] = 100
return mock[str(uuid)]
def get_usage_node_ram(self, uuid):
@staticmethod
def get_usage_node_ram(uuid):
mock = {}
# Ceilometer returns hardware.memory.used samples in KB.
mock['Node_0'] = 7*oslo_utils.units.Ki
mock['Node_1'] = 5*oslo_utils.units.Ki
mock['Node_2'] = 29*oslo_utils.units.Ki
mock['Node_3'] = 8*oslo_utils.units.Ki
mock['Node_4'] = 4*oslo_utils.units.Ki
mock['Node_0'] = 7 * oslo_utils.units.Ki
mock['Node_1'] = 5 * oslo_utils.units.Ki
mock['Node_2'] = 29 * oslo_utils.units.Ki
mock['Node_3'] = 8 * oslo_utils.units.Ki
mock['Node_4'] = 4 * oslo_utils.units.Ki
if uuid not in mock.keys():
# mock[uuid] = random.randint(1, 4)
@@ -83,7 +83,8 @@ class FakerMetricsCollector(object):
return float(mock[str(uuid)])
def get_average_airflow(self, uuid):
@staticmethod
def get_average_airflow(uuid):
"""The average outlet temperature for host"""
mock = {}
mock['Node_0'] = 400
@@ -93,7 +94,8 @@ class FakerMetricsCollector(object):
mock[uuid] = 200
return mock[str(uuid)]
def get_average_inletT(self, uuid):
@staticmethod
def get_average_inlet_t(uuid):
"""The average outlet temperature for host"""
mock = {}
mock['Node_0'] = 24
@@ -102,7 +104,8 @@ class FakerMetricsCollector(object):
mock[uuid] = 28
return mock[str(uuid)]
def get_average_power(self, uuid):
@staticmethod
def get_average_power(uuid):
"""The average outlet temperature for host"""
mock = {}
mock['Node_0'] = 260
@@ -111,12 +114,13 @@ class FakerMetricsCollector(object):
mock[uuid] = 200
return mock[str(uuid)]
def get_usage_node_cpu(self, uuid):
@staticmethod
def get_usage_node_cpu(uuid):
"""The last VM CPU usage values to average
:param uuid:00
:return:
"""
:param uuid:00
:return:
"""
# query influxdb stream
# compute in stream
@@ -151,12 +155,13 @@ class FakerMetricsCollector(object):
return float(mock[str(uuid)])
def get_average_usage_instance_cpu_wb(self, uuid):
@staticmethod
def get_average_usage_instance_cpu_wb(uuid):
"""The last VM CPU usage values to average
:param uuid:00
:return:
"""
:param uuid:00
:return:
"""
# query influxdb stream
# compute in stream
@@ -171,7 +176,8 @@ class FakerMetricsCollector(object):
mock['INSTANCE_4'] = 10
return float(mock[str(uuid)])
def get_average_usage_instance_cpu(self, uuid):
@staticmethod
def get_average_usage_instance_cpu(uuid):
"""The last VM CPU usage values to average
:param uuid:00
@@ -204,7 +210,8 @@ class FakerMetricsCollector(object):
return mock[str(uuid)]
def get_average_usage_instance_memory(self, uuid):
@staticmethod
def get_average_usage_instance_memory(uuid):
mock = {}
# node 0
mock['INSTANCE_0'] = 2
@@ -227,7 +234,8 @@ class FakerMetricsCollector(object):
return mock[str(uuid)]
def get_average_usage_instance_disk(self, uuid):
@staticmethod
def get_average_usage_instance_disk(uuid):
mock = {}
# node 0
mock['INSTANCE_0'] = 2
@@ -250,6 +258,3 @@ class FakerMetricsCollector(object):
mock[uuid] = 4
return mock[str(uuid)]
def get_virtual_machine_capacity(self, instance_uuid):
return random.randint(1, 4)

View File

@@ -102,12 +102,11 @@ class FakeCeilometerMetrics(object):
Returns relative node CPU utilization <0, 100>.
:param r_id: resource id
"""
id = '%s_%s' % (r_id.split('_')[0], r_id.split('_')[1])
instances = self.model.get_mapping().get_node_instances_by_uuid(id)
uuid = '%s_%s' % (r_id.split('_')[0], r_id.split('_')[1])
instances = self.model.get_mapping().get_node_instances_by_uuid(uuid)
util_sum = 0.0
node_cpu_cores = self.model.get_resource_by_uuid(
element.ResourceType.cpu_cores).get_capacity_by_uuid(id)
element.ResourceType.cpu_cores).get_capacity_by_uuid(uuid)
for instance_uuid in instances:
instance_cpu_cores = self.model.get_resource_by_uuid(
element.ResourceType.cpu_cores).\
@@ -118,7 +117,8 @@ class FakeCeilometerMetrics(object):
util_sum /= node_cpu_cores
return util_sum * 100.0
def get_instance_cpu_util(self, r_id):
@staticmethod
def get_instance_cpu_util(r_id):
instance_cpu_util = dict()
instance_cpu_util['INSTANCE_0'] = 10
instance_cpu_util['INSTANCE_1'] = 30
@@ -132,7 +132,8 @@ class FakeCeilometerMetrics(object):
instance_cpu_util['INSTANCE_9'] = 100
return instance_cpu_util[str(r_id)]
def get_instance_ram_util(self, r_id):
@staticmethod
def get_instance_ram_util(r_id):
instance_ram_util = dict()
instance_ram_util['INSTANCE_0'] = 1
instance_ram_util['INSTANCE_1'] = 2
@@ -146,7 +147,8 @@ class FakeCeilometerMetrics(object):
instance_ram_util['INSTANCE_9'] = 8
return instance_ram_util[str(r_id)]
def get_instance_disk_root_size(self, r_id):
@staticmethod
def get_instance_disk_root_size(r_id):
instance_disk_util = dict()
instance_disk_util['INSTANCE_0'] = 10
instance_disk_util['INSTANCE_1'] = 15

View File

@@ -0,0 +1,267 @@
# -*- encoding: utf-8 -*-
# Copyright (c) 2015 b<>com
#
# Authors: Jean-Emile DARTOIS <jean-emile.dartois@b-com.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import oslo_utils
class FakeMonascaMetrics(object):
    """Canned Monasca-style metric values for decision-engine unit tests.

    Statistic helpers return data wrapped in the
    ``[{'columns': ['avg'], 'statistics': [[value]]}]`` shape so the
    strategies under test can consume them as if they came from the
    Monasca statistics API.
    """

    def __init__(self):
        # Name of the meter for which an "empty" result should be faked.
        self.emptytype = ""

    def empty_one_metric(self, emptytype):
        """Remember *emptytype* as the meter to fake as empty."""
        self.emptytype = emptytype

    def mock_get_statistics(self, meter_name, dimensions, period,
                            aggregate='avg'):
        """Dispatch a statistics request to the matching canned helper.

        Only ``cpu.percent`` and ``vm.cpu.utilization_perc`` are faked;
        any other meter yields 0.0.  ``period`` and ``aggregate`` exist
        only to mirror the real client signature.
        """
        resource_id = dimensions.get(
            "resource_id") or dimensions.get("hostname")
        dispatch = {
            "cpu.percent": self.get_usage_node_cpu,
            "vm.cpu.utilization_perc": self.get_average_usage_instance_cpu,
        }
        handler = dispatch.get(meter_name)
        return handler(resource_id) if handler is not None else 0.0

    def mock_get_statistics_wb(self, meter_name, dimensions, period,
                               aggregate='avg'):
        """Workload-balance variant: only vm.cpu.utilization_perc is faked."""
        resource_id = dimensions.get(
            "resource_id") or dimensions.get("hostname")
        if meter_name == "vm.cpu.utilization_perc":
            return self.get_average_usage_instance_cpu_wb(resource_id)
        return 0.0

    @staticmethod
    def _avg_series(value):
        # Single-'avg'-column shape of a Monasca statistics response.
        return [{'columns': ['avg'], 'statistics': [[float(value)]]}]

    @staticmethod
    def get_average_outlet_temperature(uuid):
        """Average outlet temperature for a host."""
        measurements = {
            'Node_0': 30,
            # use a big value to make sure it exceeds threshold
            'Node_1': 100,
        }
        measurements.setdefault(uuid, 100)
        return FakeMonascaMetrics._avg_series(measurements[str(uuid)])

    @staticmethod
    def get_usage_node_ram(uuid):
        """Used RAM per host, as a bare float (not wrapped).

        Monasca returns hardware.memory.used samples in KB, hence the
        Ki multiplier on the known nodes.
        """
        measurements = {
            'Node_0': 7 * oslo_utils.units.Ki,
            'Node_1': 5 * oslo_utils.units.Ki,
            'Node_2': 29 * oslo_utils.units.Ki,
            'Node_3': 8 * oslo_utils.units.Ki,
            'Node_4': 4 * oslo_utils.units.Ki,
        }
        measurements.setdefault(uuid, 8)
        return float(measurements[str(uuid)])

    @staticmethod
    def get_average_airflow(uuid):
        """Average airflow for a host."""
        measurements = {
            'Node_0': 400,
            # use a big value to make sure it exceeds threshold
            'Node_1': 100,
        }
        measurements.setdefault(uuid, 200)
        return FakeMonascaMetrics._avg_series(measurements[str(uuid)])

    @staticmethod
    def get_average_inlet_t(uuid):
        """Average inlet temperature for a host."""
        measurements = {
            'Node_0': 24,
            'Node_1': 26,
        }
        measurements.setdefault(uuid, 28)
        return FakeMonascaMetrics._avg_series(measurements[str(uuid)])

    @staticmethod
    def get_average_power(uuid):
        """Average power draw for a host."""
        measurements = {
            'Node_0': 260,
            'Node_1': 240,
        }
        measurements.setdefault(uuid, 200)
        return FakeMonascaMetrics._avg_series(measurements[str(uuid)])

    @staticmethod
    def get_usage_node_cpu(uuid):
        """Canned node CPU usage, keyed by node id (default 8)."""
        measurements = {
            # node 0
            'Node_0': 7,
            'Node_1': 7,
            # node 1
            'Node_2': 80,
            # node 2
            'Node_3': 5,
            'Node_4': 5,
            'Node_5': 10,
            # node 3
            'Node_6': 8,
            'Node_19': 10,
            # node 4
            'INSTANCE_7': 4,
        }
        measurements.setdefault(uuid, 8)
        return FakeMonascaMetrics._avg_series(measurements[str(uuid)])

    @staticmethod
    def get_average_usage_instance_cpu_wb(uuid):
        """Canned instance CPU usage for the workload-balance scenario.

        Unlike the other helpers this has no default: an unknown uuid
        raises KeyError, matching the original fixture's behavior.
        """
        measurements = {
            # node 0
            'INSTANCE_1': 80,
            '73b09e16-35b7-4922-804e-e8f5d9b740fc': 50,
            # node 1
            'INSTANCE_3': 20,
            'INSTANCE_4': 10,
        }
        return FakeMonascaMetrics._avg_series(measurements[str(uuid)])

    @staticmethod
    def get_average_usage_instance_cpu(uuid):
        """Canned instance CPU usage, keyed by instance id (default 8)."""
        measurements = {
            # node 0
            'INSTANCE_0': 7,
            'INSTANCE_1': 7,
            # node 1
            'INSTANCE_2': 10,
            # node 2
            'INSTANCE_3': 5,
            'INSTANCE_4': 5,
            'INSTANCE_5': 10,
            # node 3
            'INSTANCE_6': 8,
            # node 4
            'INSTANCE_7': 4,
        }
        measurements.setdefault(uuid, 8)
        return FakeMonascaMetrics._avg_series(measurements[str(uuid)])

    @staticmethod
    def get_average_usage_instance_memory(uuid):
        """Canned instance memory usage, keyed by instance id (default 10)."""
        measurements = {
            # node 0
            'INSTANCE_0': 2,
            'INSTANCE_1': 5,
            # node 1
            'INSTANCE_2': 5,
            # node 2
            'INSTANCE_3': 8,
            'INSTANCE_4': 5,
            'INSTANCE_5': 16,
            # node 3
            'INSTANCE_6': 8,
            # node 4
            'INSTANCE_7': 4,
        }
        measurements.setdefault(uuid, 10)
        return FakeMonascaMetrics._avg_series(measurements[str(uuid)])

    @staticmethod
    def get_average_usage_instance_disk(uuid):
        """Canned instance disk usage, keyed by instance id (default 4)."""
        measurements = {
            # node 0
            'INSTANCE_0': 2,
            'INSTANCE_1': 2,
            # node 1
            'INSTANCE_2': 2,
            # node 2
            'INSTANCE_3': 10,
            'INSTANCE_4': 15,
            'INSTANCE_5': 20,
            # node 3
            'INSTANCE_6': 8,
            # node 4
            'INSTANCE_7': 4,
        }
        measurements.setdefault(uuid, 4)
        return FakeMonascaMetrics._avg_series(measurements[str(uuid)])

View File

@@ -24,35 +24,37 @@ from watcher.decision_engine.strategy import strategies
from watcher import objects
from watcher.tests.db import base
from watcher.tests.db import utils as db_utils
from watcher.tests.decision_engine.model import ceilometer_metrics as fake
from watcher.tests.decision_engine.model import faker_cluster_state
from watcher.tests.decision_engine.model import faker_metrics_collector as fake
from watcher.tests.objects import utils as obj_utils
class SolutionFaker(object):
@staticmethod
def build():
metrics = fake.FakerMetricsCollector()
metrics = fake.FakeCeilometerMetrics()
current_state_cluster = faker_cluster_state.FakerModelCollector()
sercon = strategies.BasicConsolidation(config=mock.Mock())
sercon._compute_model = current_state_cluster.generate_scenario_1()
sercon.ceilometer = mock.MagicMock(
strategy = strategies.BasicConsolidation(
config=mock.Mock(datasource="ceilometer"))
strategy._compute_model = current_state_cluster.generate_scenario_1()
strategy.ceilometer = mock.MagicMock(
get_statistics=metrics.mock_get_statistics)
return sercon.execute()
return strategy.execute()
class SolutionFakerSingleHyp(object):
@staticmethod
def build():
metrics = fake.FakerMetricsCollector()
metrics = fake.FakeCeilometerMetrics()
current_state_cluster = faker_cluster_state.FakerModelCollector()
sercon = strategies.BasicConsolidation(config=mock.Mock())
sercon._compute_model = (
strategy = strategies.BasicConsolidation(
config=mock.Mock(datasource="ceilometer"))
strategy._compute_model = (
current_state_cluster.generate_scenario_3_with_2_nodes())
sercon.ceilometer = mock.MagicMock(
strategy.ceilometer = mock.MagicMock(
get_statistics=metrics.mock_get_statistics)
return sercon.execute()
return strategy.execute()
class TestActionScheduling(base.DbTestCase):

View File

@@ -26,16 +26,26 @@ from watcher.common import exception
from watcher.decision_engine.model import model_root
from watcher.decision_engine.strategy import strategies
from watcher.tests import base
from watcher.tests.decision_engine.model import ceilometer_metrics
from watcher.tests.decision_engine.model import faker_cluster_state
from watcher.tests.decision_engine.model import faker_metrics_collector
from watcher.tests.decision_engine.model import monasca_metrics
class TestBasicConsolidation(base.TestCase):
scenarios = [
("Ceilometer",
{"datasource": "ceilometer",
"fake_datasource_cls": ceilometer_metrics.FakeCeilometerMetrics}),
("Monasca",
{"datasource": "monasca",
"fake_datasource_cls": monasca_metrics.FakeMonascaMetrics}),
]
def setUp(self):
super(TestBasicConsolidation, self).setUp()
# fake metrics
self.fake_metrics = faker_metrics_collector.FakerMetricsCollector()
self.fake_metrics = self.fake_datasource_cls()
# fake cluster
self.fake_cluster = faker_cluster_state.FakerModelCollector()
@@ -50,11 +60,11 @@ class TestBasicConsolidation(base.TestCase):
self.m_model = p_model.start()
self.addCleanup(p_model.stop)
p_ceilometer = mock.patch.object(
strategies.BasicConsolidation, "ceilometer",
p_datasource = mock.patch.object(
strategies.BasicConsolidation, self.datasource,
new_callable=mock.PropertyMock)
self.m_ceilometer = p_ceilometer.start()
self.addCleanup(p_ceilometer.stop)
self.m_datasource = p_datasource.start()
self.addCleanup(p_datasource.stop)
p_audit_scope = mock.patch.object(
strategies.BasicConsolidation, "audit_scope",
@@ -66,9 +76,10 @@ class TestBasicConsolidation(base.TestCase):
self.m_audit_scope.return_value = mock.Mock()
self.m_model.return_value = model_root.ModelRoot()
self.m_ceilometer.return_value = mock.Mock(
self.m_datasource.return_value = mock.Mock(
statistic_aggregation=self.fake_metrics.mock_get_statistics)
self.strategy = strategies.BasicConsolidation(config=mock.Mock())
self.strategy = strategies.BasicConsolidation(
config=mock.Mock(datasource=self.datasource))
def test_cluster_size(self):
size_cluster = len(
@@ -126,7 +137,7 @@ class TestBasicConsolidation(base.TestCase):
instance_0_score = 0.023333333333333355
self.assertEqual(
instance_0_score,
self.strategy.calculate_score_instance(instance_0, ))
self.strategy.calculate_score_instance(instance_0))
def test_basic_consolidation_weight(self):
model = self.fake_cluster.generate_scenario_1()

View File

@@ -26,8 +26,8 @@ from watcher.decision_engine.model import element
from watcher.decision_engine.model import model_root
from watcher.decision_engine.strategy import strategies
from watcher.tests import base
from watcher.tests.decision_engine.model import ceilometer_metrics
from watcher.tests.decision_engine.model import faker_cluster_state
from watcher.tests.decision_engine.model import faker_metrics_collector
class TestOutletTempControl(base.TestCase):
@@ -35,7 +35,7 @@ class TestOutletTempControl(base.TestCase):
def setUp(self):
super(TestOutletTempControl, self).setUp()
# fake metrics
self.fake_metrics = faker_metrics_collector.FakerMetricsCollector()
self.fake_metrics = ceilometer_metrics.FakeCeilometerMetrics()
# fake cluster
self.fake_cluster = faker_cluster_state.FakerModelCollector()

View File

@@ -26,8 +26,8 @@ from watcher.decision_engine.model import element
from watcher.decision_engine.model import model_root
from watcher.decision_engine.strategy import strategies
from watcher.tests import base
from watcher.tests.decision_engine.model import ceilometer_metrics
from watcher.tests.decision_engine.model import faker_cluster_state
from watcher.tests.decision_engine.model import faker_metrics_collector
class TestUniformAirflow(base.TestCase):
@@ -35,7 +35,7 @@ class TestUniformAirflow(base.TestCase):
def setUp(self):
super(TestUniformAirflow, self).setUp()
# fake metrics
self.fake_metrics = faker_metrics_collector.FakerMetricsCollector()
self.fake_metrics = ceilometer_metrics.FakeCeilometerMetrics()
# fake cluster
self.fake_cluster = faker_cluster_state.FakerModelCollector()

View File

@@ -26,8 +26,8 @@ from watcher.decision_engine.model import element
from watcher.decision_engine.model import model_root
from watcher.decision_engine.strategy import strategies
from watcher.tests import base
from watcher.tests.decision_engine.model import ceilometer_metrics
from watcher.tests.decision_engine.model import faker_cluster_state
from watcher.tests.decision_engine.model import faker_metrics_collector
class TestWorkloadBalance(base.TestCase):
@@ -35,7 +35,7 @@ class TestWorkloadBalance(base.TestCase):
def setUp(self):
super(TestWorkloadBalance, self).setUp()
# fake metrics
self.fake_metrics = faker_metrics_collector.FakerMetricsCollector()
self.fake_metrics = ceilometer_metrics.FakeCeilometerMetrics()
# fake cluster
self.fake_cluster = faker_cluster_state.FakerModelCollector()

View File

@@ -23,8 +23,8 @@ from watcher.common import utils
from watcher.decision_engine.model import model_root
from watcher.decision_engine.strategy import strategies
from watcher.tests import base
from watcher.tests.decision_engine.model import ceilometer_metrics
from watcher.tests.decision_engine.model import faker_cluster_state
from watcher.tests.decision_engine.model import faker_metrics_collector
class TestWorkloadStabilization(base.TestCase):
@@ -33,7 +33,7 @@ class TestWorkloadStabilization(base.TestCase):
super(TestWorkloadStabilization, self).setUp()
# fake metrics
self.fake_metrics = faker_metrics_collector.FakerMetricsCollector()
self.fake_metrics = ceilometer_metrics.FakeCeilometerMetrics()
# fake cluster
self.fake_cluster = faker_cluster_state.FakerModelCollector()