Add DM Sans font integration and enhance dashboard context
- Added DM Sans font to the project, including multiple weights and styles for improved typography. - Updated package.json and package-lock.json to include @fontsource/dm-sans dependency. - Enhanced dashboard context to include current cluster CPU state, integrating new data into the context and API responses. - Updated relevant templates and JavaScript to utilize the new current cluster data for better visualization and user experience.
This commit is contained in:
@@ -112,4 +112,8 @@ def get_mock_context():
|
||||
"third_common_flavor": {"name": "m1.large", "count": 4},
|
||||
},
|
||||
"audits": audits,
|
||||
"current_cluster": {
|
||||
"host_labels": json.dumps(host_labels),
|
||||
"cpu_current": json.dumps(cpu_current),
|
||||
},
|
||||
}
|
||||
|
||||
@@ -33,6 +33,19 @@ def convert_cpu_data(data: list):
|
||||
.reset_index()
|
||||
)
|
||||
|
||||
|
||||
def get_current_cluster_cpu(connection: Connection) -> dict:
    """Return current per-host CPU state for the cluster (no Watcher dependency).

    Queries Prometheus for the cpu_usage metric, converts the raw result via
    convert_cpu_data, and returns two parallel lists keyed for the dashboard:
    "host_labels" (host names) and "cpu_current" (per-host CPU usage values).
    Both lists are empty when Prometheus returned no usable data.

    NOTE(review): `connection` is accepted for signature symmetry with the
    other collectors but is not read here — confirm before removing.
    """
    metrics = convert_cpu_data(data=query_prometheus(PROMETHEUS_METRICS['cpu_usage']))

    # No rows -> render-ready empty payload rather than None.
    if metrics.empty:
        return {"host_labels": [], "cpu_current": []}

    return {
        "host_labels": metrics['host'].to_list(),
        "cpu_current": metrics['cpu_usage'].to_list(),
    }
|
||||
|
||||
|
||||
def get_audits(connection: Connection) -> list[dict] | None:
|
||||
session = connection.session
|
||||
|
||||
|
||||
@@ -1,7 +1,9 @@
|
||||
"""Tests for dashboard.openstack_utils.audits."""
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
from django.test import TestCase
|
||||
|
||||
-from dashboard.openstack_utils.audits import convert_cpu_data
+from dashboard.openstack_utils.audits import convert_cpu_data, get_current_cluster_cpu
|
||||
|
||||
|
||||
class ConvertCpuDataTest(TestCase):
|
||||
@@ -41,3 +43,29 @@ class ConvertCpuDataTest(TestCase):
|
||||
self.assertIn("host", result.columns)
|
||||
self.assertIn("cpu_usage", result.columns)
|
||||
self.assertEqual(len(result), 0)
|
||||
|
||||
|
||||
class GetCurrentClusterCpuTest(TestCase):
    """Tests for get_current_cluster_cpu."""

    @patch("dashboard.openstack_utils.audits.query_prometheus")
    def test_returns_empty_lists_when_no_data(self, mock_query):
        # Prometheus returning nothing must yield empty (not missing) keys.
        mock_query.return_value = []

        result = get_current_cluster_cpu(MagicMock())

        self.assertEqual(result["host_labels"], [])
        self.assertEqual(result["cpu_current"], [])

    @patch("dashboard.openstack_utils.audits.convert_cpu_data")
    @patch("dashboard.openstack_utils.audits.query_prometheus")
    def test_returns_host_labels_and_cpu_current(self, mock_query, mock_convert):
        import pandas as pd

        # Raw query payload is irrelevant here; convert_cpu_data is stubbed.
        mock_query.return_value = [{"metric": {"host": "h0"}, "values": [[0, "1.0"]]}]
        mock_convert.return_value = pd.DataFrame(
            {"host": ["compute-0", "compute-1"], "cpu_usage": [25.0, 35.0]}
        )

        result = get_current_cluster_cpu(MagicMock())

        self.assertEqual(result["host_labels"], ["compute-0", "compute-1"])
        self.assertEqual(result["cpu_current"], [25.0, 35.0])
|
||||
|
||||
@@ -11,7 +11,7 @@ class GetMockContextTest(TestCase):
|
||||
|
||||
def test_returns_all_top_level_keys(self):
|
||||
ctx = get_mock_context()
|
||||
-expected_keys = {"region", "pcpu", "vcpu", "pram", "vram", "vm", "flavors", "audits"}
+expected_keys = {"region", "pcpu", "vcpu", "pram", "vram", "vm", "flavors", "audits", "current_cluster"}
|
||||
self.assertEqual(set(ctx.keys()), expected_keys)
|
||||
|
||||
def test_region_structure(self):
|
||||
|
||||
@@ -74,14 +74,16 @@ class CollectContextTest(TestCase):
|
||||
conn._compute_region = region_name
|
||||
return conn
|
||||
|
||||
@patch("dashboard.views.get_current_cluster_cpu")
|
||||
@patch("dashboard.views._fetch_prometheus_metrics")
|
||||
@patch("dashboard.views.get_audits")
|
||||
@patch("dashboard.views.get_flavor_list")
|
||||
@patch("dashboard.views.get_connection")
|
||||
def test_collect_context_structure_and_calculation(
|
||||
-self, mock_get_connection, mock_get_flavor_list, mock_get_audits, mock_fetch_metrics
+self, mock_get_connection, mock_get_flavor_list, mock_get_audits, mock_fetch_metrics, mock_get_current_cluster_cpu
|
||||
):
|
||||
mock_get_connection.return_value = self._make_mock_connection("my-region")
|
||||
mock_get_current_cluster_cpu.return_value = {"host_labels": ["h0", "h1"], "cpu_current": [30.0, 40.0]}
|
||||
mock_get_flavor_list.return_value = {
|
||||
"first_common_flavor": {"name": "m1.small", "count": 5},
|
||||
"second_common_flavor": {"name": "—", "count": 0},
|
||||
@@ -124,6 +126,9 @@ class CollectContextTest(TestCase):
|
||||
import json
|
||||
self.assertIsInstance(context["audits"][0]["migrations"], str)
|
||||
self.assertEqual(json.loads(context["audits"][0]["host_labels"]), ["h0", "h1"])
|
||||
self.assertIn("current_cluster", context)
|
||||
self.assertEqual(json.loads(context["current_cluster"]["host_labels"]), ["h0", "h1"])
|
||||
self.assertEqual(json.loads(context["current_cluster"]["cpu_current"]), [30.0, 40.0])
|
||||
|
||||
|
||||
class ApiStatsTest(TestCase):
|
||||
@@ -196,10 +201,11 @@ class ApiAuditsTest(TestCase):
|
||||
def setUp(self):
|
||||
self.factory = RequestFactory()
|
||||
|
||||
@patch("dashboard.views.get_current_cluster_cpu")
|
||||
@patch("dashboard.views.get_audits")
|
||||
@patch("dashboard.views.get_connection")
|
||||
def test_api_audits_returns_json_audits_list(
|
||||
-self, mock_get_connection, mock_get_audits
+self, mock_get_connection, mock_get_audits, mock_get_current_cluster_cpu
|
||||
):
|
||||
mock_get_connection.return_value = MagicMock()
|
||||
mock_get_audits.return_value = [
|
||||
@@ -218,6 +224,7 @@ class ApiAuditsTest(TestCase):
|
||||
"cpu_projected": [35.0, 35.0],
|
||||
}
|
||||
]
|
||||
mock_get_current_cluster_cpu.return_value = {"host_labels": ["h0", "h1"], "cpu_current": [30.0, 40.0]}
|
||||
cache.clear()
|
||||
request = self.factory.get("/api/audits/")
|
||||
with patch("dashboard.views.settings") as mock_settings:
|
||||
@@ -231,15 +238,24 @@ class ApiAuditsTest(TestCase):
|
||||
self.assertEqual(data["audits"][0]["name"], "Test Audit")
|
||||
self.assertIsInstance(data["audits"][0]["migrations"], str)
|
||||
self.assertIsInstance(data["audits"][0]["host_labels"], str)
|
||||
self.assertIn("current_cluster", data)
|
||||
self.assertEqual(data["current_cluster"]["host_labels"], ["h0", "h1"])
|
||||
self.assertEqual(data["current_cluster"]["cpu_current"], [30.0, 40.0])
|
||||
|
||||
@patch("dashboard.views.get_current_cluster_cpu")
|
||||
@patch("dashboard.views.collect_audits")
|
||||
@patch("dashboard.views.settings")
|
||||
-def test_api_audits_uses_cache(self, mock_settings, mock_collect_audits):
+def test_api_audits_uses_cache(self, mock_settings, mock_collect_audits, mock_get_current_cluster_cpu):
|
||||
mock_settings.DASHBOARD_CACHE_TTL = 120
|
||||
-cached = [{"id": "cached-1", "name": "Cached Audit", "migrations": "[]", "host_labels": "[]", "cpu_current": "[]", "cpu_projected": "[]"}]
+cached_audits = [{"id": "cached-1", "name": "Cached Audit", "migrations": "[]", "host_labels": "[]", "cpu_current": "[]", "cpu_projected": "[]"}]
|
||||
cached_cluster = {"host_labels": ["cached-h0"], "cpu_current": [10.0]}
|
||||
cache.clear()
|
||||
-cache.set("dashboard_audits", cached, timeout=120)
+cache.set("dashboard_audits", cached_audits, timeout=120)
|
||||
cache.set("dashboard_current_cluster", cached_cluster, timeout=120)
|
||||
request = self.factory.get("/api/audits/")
|
||||
response = api_audits(request)
|
||||
mock_collect_audits.assert_not_called()
|
||||
self.assertEqual(json.loads(response.content)["audits"][0]["name"], "Cached Audit")
|
||||
mock_get_current_cluster_cpu.assert_not_called()
|
||||
data = json.loads(response.content)
|
||||
self.assertEqual(data["audits"][0]["name"], "Cached Audit")
|
||||
self.assertEqual(data["current_cluster"], cached_cluster)
|
||||
|
||||
@@ -8,7 +8,7 @@ from django.shortcuts import render
|
||||
from dashboard.openstack_utils.connect import get_connection
|
||||
from dashboard.openstack_utils.flavor import get_flavor_list
|
||||
from dashboard.prometheus_utils.query import query_prometheus
|
||||
-from dashboard.openstack_utils.audits import get_audits
+from dashboard.openstack_utils.audits import get_audits, get_current_cluster_cpu
|
||||
from dashboard.mock_data import get_mock_context
|
||||
|
||||
# Prometheus queries run in parallel (query_key -> query string)
|
||||
@@ -122,6 +122,11 @@ def collect_context():
|
||||
"flavors": flavors,
|
||||
"audits": audits,
|
||||
}
|
||||
current_cluster = get_current_cluster_cpu(connection)
|
||||
context["current_cluster"] = {
|
||||
"host_labels": json.dumps(current_cluster["host_labels"]),
|
||||
"cpu_current": json.dumps(current_cluster["cpu_current"]),
|
||||
}
|
||||
# Serialize audit list fields for JavaScript so cached context is render-ready
|
||||
for audit in context["audits"]:
|
||||
audit["migrations"] = json.dumps(audit["migrations"])
|
||||
@@ -221,6 +226,10 @@ def _skeleton_context():
|
||||
"vm": {"count": 0, "active": 0, "stopped": 0, "avg_cpu": 0, "avg_ram": 0, "density": 0},
|
||||
"flavors": empty_flavors,
|
||||
"audits": [],
|
||||
"current_cluster": {
|
||||
"host_labels": "[]",
|
||||
"cpu_current": "[]",
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
@@ -243,10 +252,16 @@ def api_stats(request):
|
||||
|
||||
|
||||
def api_audits(request):
|
||||
-cache_key = "dashboard_audits"
+cache_key_audits = "dashboard_audits"
+cache_key_cluster = "dashboard_current_cluster"
|
||||
cache_key_cluster = "dashboard_current_cluster"
|
||||
cache_ttl = getattr(settings, "DASHBOARD_CACHE_TTL", 120)
|
||||
-audits = cache.get(cache_key)
+audits = cache.get(cache_key_audits)
+current_cluster = cache.get(cache_key_cluster)
|
||||
current_cluster = cache.get(cache_key_cluster)
|
||||
if audits is None:
|
||||
audits = collect_audits()
|
||||
-cache.set(cache_key, audits, timeout=cache_ttl)
-return JsonResponse({"audits": audits})
+cache.set(cache_key_audits, audits, timeout=cache_ttl)
|
||||
if current_cluster is None:
|
||||
connection = get_connection()
|
||||
current_cluster = get_current_cluster_cpu(connection)
|
||||
cache.set(cache_key_cluster, current_cluster, timeout=cache_ttl)
|
||||
return JsonResponse({"audits": audits, "current_cluster": current_cluster})
|
||||
Reference in New Issue
Block a user