Add gnocchi support in basic_consolidation strategy

This patch adds gnocchi support in basic_consolidation strategy
and adds unit tests corresponding to that change.

Change-Id: Ia1ee55fca8eadffbd244c0247577805b6856369d
Partially-Implements: bp gnocchi-watcher
This commit is contained in:
Santhosh Fernandes
2017-03-02 15:35:39 +05:30
parent 2c2120526c
commit 18aa50c58e
3 changed files with 303 additions and 1 deletion

View File

@@ -35,12 +35,15 @@ migration is possible on your OpenStack cluster.
"""
import datetime
from oslo_config import cfg
from oslo_log import log
from watcher._i18n import _, _LE, _LI, _LW
from watcher.common import exception
from watcher.datasource import ceilometer as ceil
from watcher.datasource import gnocchi as gnoc
from watcher.datasource import monasca as mon
from watcher.decision_engine.model import element
from watcher.decision_engine.strategy.strategies import base
@@ -61,6 +64,9 @@ class BasicConsolidation(base.ServerConsolidationBaseStrategy):
monasca=dict(
host_cpu_usage='cpu.percent',
instance_cpu_usage='vm.cpu.utilization_perc'),
gnocchi=dict(
host_cpu_usage='compute.node.cpu.percent',
instance_cpu_usage='cpu_util'),
)
MIGRATION = "migrate"
@@ -87,6 +93,7 @@ class BasicConsolidation(base.ServerConsolidationBaseStrategy):
self._ceilometer = None
self._monasca = None
self._gnocchi = None
# TODO(jed): improve threshold overbooking?
self.threshold_mem = 1
@@ -105,6 +112,10 @@ class BasicConsolidation(base.ServerConsolidationBaseStrategy):
def period(self):
return self.input_parameters.get('period', 7200)
@property
def granularity(self):
    # Gnocchi aggregation step in seconds between two measures of an
    # aggregated timeseries; defaults to 300 (5 minutes) when the audit
    # input parameters do not override it.
    return self.input_parameters.get('granularity', 300)
@classmethod
def get_display_name(cls):
return _("Basic offline consolidation")
@@ -132,6 +143,12 @@ class BasicConsolidation(base.ServerConsolidationBaseStrategy):
"type": "number",
"default": 7200
},
"granularity": {
"description": "The time between two measures in an "
"aggregated timeseries of a metric.",
"type": "number",
"default": 300
},
},
}
@@ -142,7 +159,7 @@ class BasicConsolidation(base.ServerConsolidationBaseStrategy):
"datasource",
help="Data source to use in order to query the needed metrics",
default="ceilometer",
choices=["ceilometer", "monasca"]),
choices=["ceilometer", "monasca", "gnocchi"])
]
@property
@@ -165,6 +182,16 @@ class BasicConsolidation(base.ServerConsolidationBaseStrategy):
def monasca(self, monasca):
self._monasca = monasca
@property
def gnocchi(self):
    # Lazily instantiate the Gnocchi datasource helper on first access,
    # reusing this strategy's OpenStack client wrapper (self.osc).
    if self._gnocchi is None:
        self._gnocchi = gnoc.GnocchiHelper(osc=self.osc)
    return self._gnocchi

@gnocchi.setter
def gnocchi(self, gnocchi):
    # Setter exists so unit tests can inject a fake/mocked helper.
    self._gnocchi = gnocchi
def check_migration(self, source_node, destination_node,
instance_to_migrate):
"""Check if the migration is possible
@@ -260,6 +287,19 @@ class BasicConsolidation(base.ServerConsolidationBaseStrategy):
period=self.period,
aggregate='avg',
)
elif self.config.datasource == "gnocchi":
resource_id = "%s_%s" % (node.uuid, node.hostname)
stop_time = datetime.datetime.utcnow()
start_time = stop_time - datetime.timedelta(
seconds=int(self.period))
return self.gnocchi.statistic_aggregation(
resource_id=resource_id,
metric=metric_name,
granularity=self.granularity,
start_time=start_time,
stop_time=stop_time,
aggregation='mean'
)
elif self.config.datasource == "monasca":
statistics = self.monasca.statistic_aggregation(
meter_name=metric_name,
@@ -289,6 +329,18 @@ class BasicConsolidation(base.ServerConsolidationBaseStrategy):
period=self.period,
aggregate='avg'
)
elif self.config.datasource == "gnocchi":
stop_time = datetime.datetime.utcnow()
start_time = stop_time - datetime.timedelta(
seconds=int(self.period))
return self.gnocchi.statistic_aggregation(
resource_id=instance.uuid,
metric=metric_name,
granularity=self.granularity,
start_time=start_time,
stop_time=stop_time,
aggregation='mean',
)
elif self.config.datasource == "monasca":
statistics = self.monasca.statistic_aggregation(
meter_name=metric_name,

View File

@@ -0,0 +1,216 @@
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import oslo_utils
from oslo_utils import units
class FakeGnocchiMetrics(object):
    """Fake Gnocchi datasource returning canned, deterministic metric values.

    Mirrors the calling convention of ``GnocchiHelper.statistic_aggregation``
    so unit tests can patch the gnocchi datasource of a strategy and get
    predictable numbers for each node/instance resource id.
    """

    def __init__(self):
        # Name of the metric a test wants treated as "empty"; set via
        # empty_one_metric().
        self.emptytype = ""

    def empty_one_metric(self, emptytype):
        """Record which metric the test wants to behave as empty.

        :param emptytype: metric name to mark as empty
        """
        self.emptytype = emptytype

    def mock_get_statistics(self, resource_id, metric, granularity,
                            start_time, stop_time, aggregation='mean'):
        """Dispatch *metric* to the matching fake getter.

        Signature matches GnocchiHelper.statistic_aggregation; only
        ``resource_id`` and ``metric`` influence the returned value.
        Unknown metric names yield 0, as in the original if/elif chain.
        """
        dispatch = {
            "hardware.cpu.util": self.get_usage_node_cpu,
            "compute.node.cpu.percent": self.get_usage_node_cpu,
            "hardware.memory.used": self.get_usage_node_ram,
            "cpu_util": self.get_average_usage_instance_cpu,
            "memory.resident": self.get_average_usage_instance_memory,
            "hardware.ipmi.node.outlet_temperature":
                self.get_average_outlet_temperature,
            "hardware.ipmi.node.airflow": self.get_average_airflow,
            "hardware.ipmi.node.temperature": self.get_average_inlet_t,
            "hardware.ipmi.node.power": self.get_average_power,
        }
        getter = dispatch.get(metric)
        return getter(resource_id) if getter is not None else 0

    @staticmethod
    def get_average_outlet_temperature(uuid):
        """The average outlet temperature for a host.

        :param uuid: node resource id
        :return: canned temperature value (100 for unknown nodes)
        """
        mock = {
            'Node_0': 30,
            # use a big value to make sure it exceeds threshold
            'Node_1': 100,
        }
        # NOTE: the original inserted mock[uuid] = 100 and then looked up
        # mock[str(uuid)], which raises KeyError for non-string uuids;
        # dict.get with a default is the equivalent, safe lookup.
        return mock.get(uuid, 100)

    @staticmethod
    def get_usage_node_ram(uuid):
        """The memory used on a host, in KB.

        :param uuid: node resource id
        :return: memory usage as float
        """
        # Gnocchi returns hardware.memory.used samples in KB.
        mock = {
            'Node_0': 7 * units.Ki,
            'Node_1': 5 * units.Ki,
            'Node_2': 29 * units.Ki,
            'Node_3': 8 * units.Ki,
            'Node_4': 4 * units.Ki,
        }
        # NOTE(review): the fallback (8) is not Ki-scaled like the fixed
        # entries — preserved from the original; confirm it is intentional.
        return float(mock.get(uuid, 8))

    @staticmethod
    def get_average_airflow(uuid):
        """The average airflow for a host."""
        mock = {
            'Node_0': 400,
            # use a big value to make sure it exceeds threshold
            'Node_1': 100,
        }
        return mock.get(uuid, 200)

    @staticmethod
    def get_average_inlet_t(uuid):
        """The average inlet temperature for a host."""
        mock = {
            'Node_0': 24,
            'Node_1': 26,
        }
        return mock.get(uuid, 28)

    @staticmethod
    def get_average_power(uuid):
        """The average power for a host."""
        mock = {
            'Node_0': 260,
            'Node_1': 240,
        }
        return mock.get(uuid, 200)

    @staticmethod
    def get_usage_node_cpu(uuid):
        """The last node CPU usage values to average.

        :param uuid: node resource id (``<uuid>_<hostname>`` or bare name)
        :return: CPU usage as float (8.0 for unknown nodes)
        """
        # Normalize
        mock = {
            # node 0
            'Node_0_hostname_0': 7,
            'Node_1_hostname_1': 7,
            # node 1
            'Node_2_hostname_2': 80,
            # node 2
            'Node_3_hostname_3': 5,
            'Node_4_hostname_4': 5,
            'Node_5_hostname_5': 10,
            # node 3
            'Node_6_hostname_6': 8,
            'Node_19_hostname_19': 10,
            # node 4
            'INSTANCE_7_hostname_7': 4,
            'Node_0': 7,
            'Node_1': 5,
            'Node_2': 10,
            'Node_3': 4,
            'Node_4': 2,
        }
        return float(mock.get(uuid, 8))

    @staticmethod
    def get_average_usage_instance_cpu(uuid):
        """The last VM CPU usage values to average.

        :param uuid: instance resource id
        :return: CPU usage (8 for unknown instances)
        """
        # Normalize
        mock = {
            # node 0
            'INSTANCE_0': 7,
            'INSTANCE_1': 7,
            # node 1
            'INSTANCE_2': 10,
            # node 2
            'INSTANCE_3': 5,
            'INSTANCE_4': 5,
            'INSTANCE_5': 10,
            # node 3
            'INSTANCE_6': 8,
            # node 4
            'INSTANCE_7': 4,
        }
        return mock.get(uuid, 8)

    @staticmethod
    def get_average_usage_instance_memory(uuid):
        """The resident memory of an instance."""
        mock = {
            # node 0
            'INSTANCE_0': 2,
            'INSTANCE_1': 5,
            # node 1
            'INSTANCE_2': 5,
            # node 2
            'INSTANCE_3': 8,
            'INSTANCE_4': 5,
            'INSTANCE_5': 16,
            # node 3
            'INSTANCE_6': 8,
            # node 4
            'INSTANCE_7': 4,
        }
        return mock.get(uuid, 10)

    @staticmethod
    def get_average_usage_instance_disk(uuid):
        """The disk usage of an instance."""
        mock = {
            # node 0
            'INSTANCE_0': 2,
            'INSTANCE_1': 2,
            # node 1
            'INSTANCE_2': 2,
            # node 2
            'INSTANCE_3': 10,
            'INSTANCE_4': 15,
            'INSTANCE_5': 20,
            # node 3
            'INSTANCE_6': 8,
            # node 4
            'INSTANCE_7': 4,
        }
        return mock.get(uuid, 4)

View File

@@ -18,6 +18,7 @@
#
import collections
import copy
import datetime
import mock
from watcher.applier.loading import default
@@ -28,6 +29,7 @@ from watcher.decision_engine.strategy import strategies
from watcher.tests import base
from watcher.tests.decision_engine.model import ceilometer_metrics
from watcher.tests.decision_engine.model import faker_cluster_state
from watcher.tests.decision_engine.model import gnocchi_metrics
from watcher.tests.decision_engine.model import monasca_metrics
@@ -40,6 +42,9 @@ class TestBasicConsolidation(base.TestCase):
("Monasca",
{"datasource": "monasca",
"fake_datasource_cls": monasca_metrics.FakeMonascaMetrics}),
("Gnocchi",
{"datasource": "gnocchi",
"fake_datasource_cls": gnocchi_metrics.FakeGnocchiMetrics}),
]
def setUp(self):
@@ -276,10 +281,23 @@ class TestBasicConsolidation(base.TestCase):
p_monasca = mock.patch.object(strategies.BasicConsolidation, "monasca")
m_monasca = p_monasca.start()
self.addCleanup(p_monasca.stop)
p_gnocchi = mock.patch.object(strategies.BasicConsolidation, "gnocchi")
m_gnocchi = p_gnocchi.start()
self.addCleanup(p_gnocchi.stop)
datetime_patcher = mock.patch.object(
datetime, 'datetime',
mock.Mock(wraps=datetime.datetime)
)
mocked_datetime = datetime_patcher.start()
mocked_datetime.utcnow.return_value = datetime.datetime(
2017, 3, 19, 18, 53, 11, 657417)
self.addCleanup(datetime_patcher.stop)
m_monasca.return_value = mock.Mock(
statistic_aggregation=self.fake_metrics.mock_get_statistics)
m_ceilometer.return_value = mock.Mock(
statistic_aggregation=self.fake_metrics.mock_get_statistics)
m_gnocchi.return_value = mock.Mock(
statistic_aggregation=self.fake_metrics.mock_get_statistics)
self.strategy.calculate_score_node(node_1)
resource_id = "%s_%s" % (node_1.uuid, node_1.hostname)
if self.strategy.config.datasource == "ceilometer":
@@ -290,6 +308,14 @@ class TestBasicConsolidation(base.TestCase):
m_monasca.statistic_aggregation.assert_called_with(
aggregate='avg', meter_name='cpu.percent',
period=7200, dimensions={'hostname': 'Node_1'})
elif self.strategy.config.datasource == "gnocchi":
stop_time = datetime.datetime.utcnow()
start_time = stop_time - datetime.timedelta(
seconds=int('7200'))
m_gnocchi.statistic_aggregation.assert_called_with(
resource_id=resource_id, metric='compute.node.cpu.percent',
granularity=300, start_time=start_time, stop_time=stop_time,
aggregation='mean')
self.strategy.input_parameters.update({"period": 600})
self.strategy.calculate_score_node(node_1)
@@ -301,3 +327,11 @@ class TestBasicConsolidation(base.TestCase):
m_monasca.statistic_aggregation.assert_called_with(
aggregate='avg', meter_name='cpu.percent',
period=600, dimensions={'hostname': 'Node_1'})
elif self.strategy.config.datasource == "gnocchi":
stop_time = datetime.datetime.utcnow()
start_time = stop_time - datetime.timedelta(
seconds=int('600'))
m_gnocchi.statistic_aggregation.assert_called_with(
resource_id=resource_id, metric='compute.node.cpu.percent',
granularity=300, start_time=start_time, stop_time=stop_time,
aggregation='mean')