From 5c81f1bd7fd0edee79e5f33751dd8299ebed6bbf Mon Sep 17 00:00:00 2001
From: Alexander Chadin
Date: Fri, 9 Mar 2018 12:49:34 +0300
Subject: [PATCH] Add parameter aggregation_method for basic_consolidation

This parameter is required to fix tempest multinode test.

Change-Id: I4014fb7a76ce74e1426378183ecef0308bc56ce7
---
 .../strategies/basic_consolidation.py         | 32 +++++++++++++++++--
 1 file changed, 30 insertions(+), 2 deletions(-)

diff --git a/watcher/decision_engine/strategy/strategies/basic_consolidation.py b/watcher/decision_engine/strategy/strategies/basic_consolidation.py
index b239f856a..345e419f7 100644
--- a/watcher/decision_engine/strategy/strategies/basic_consolidation.py
+++ b/watcher/decision_engine/strategy/strategies/basic_consolidation.py
@@ -109,6 +109,12 @@ class BasicConsolidation(base.ServerConsolidationBaseStrategy):
     def granularity(self):
         return self.input_parameters.get('granularity', 300)
 
+    @property
+    def aggregation_method(self):
+        return self.input_parameters.get(
+            'aggregation_method',
+            {"instance": 'mean', "node": 'mean'})
+
     @classmethod
     def get_display_name(cls):
         return _("Basic offline consolidation")
@@ -142,6 +148,26 @@ class BasicConsolidation(base.ServerConsolidationBaseStrategy):
                 "type": "number",
                 "default": 300
             },
+            "aggregation_method": {
+                "description": "Function used to aggregate multiple "
+                               "measures into an aggregate. For example, "
+                               "the min aggregation method will aggregate "
+                               "the values of different measures to the "
+                               "minimum value of all the measures in the "
+                               "time range.",
+                "type": "object",
+                "properties": {
+                    "instance": {
+                        "type": "string",
+                        "default": 'mean'
+                    },
+                    "node": {
+                        "type": "string",
+                        "default": 'mean'
+                    },
+                },
+                "default": {"instance": 'mean', "node": 'mean'}
+            },
         },
     }
 
@@ -258,11 +284,13 @@ class BasicConsolidation(base.ServerConsolidationBaseStrategy):
     def get_node_cpu_usage(self, node):
         resource_id = "%s_%s" % (node.uuid, node.hostname)
         return self.datasource_backend.get_host_cpu_usage(
-            resource_id, self.period, 'mean', granularity=self.granularity)
+            resource_id, self.period, self.aggregation_method['node'],
+            granularity=300)
 
     def get_instance_cpu_usage(self, instance):
         return self.datasource_backend.get_instance_cpu_usage(
-            instance.uuid, self.period, 'mean', granularity=self.granularity)
+            instance.uuid, self.period, self.aggregation_method['instance'],
+            granularity=300)
 
     def calculate_score_node(self, node):
         """Calculate the score that represent the utilization level