develop #1

Merged
Arnike merged 21 commits from develop into main 2026-02-07 18:06:51 +03:00
19 changed files with 181 additions and 16 deletions
Showing only changes of commit 917a7758bc - Show all commits

View File

@@ -112,4 +112,8 @@ def get_mock_context():
"third_common_flavor": {"name": "m1.large", "count": 4},
},
"audits": audits,
"current_cluster": {
"host_labels": json.dumps(host_labels),
"cpu_current": json.dumps(cpu_current),
},
}

View File

@@ -33,6 +33,19 @@ def convert_cpu_data(data: list):
.reset_index()
)
def get_current_cluster_cpu(connection: Connection) -> dict:
    """Return the cluster's current per-host CPU usage (no Watcher dependency).

    Queries Prometheus for the ``cpu_usage`` metric and flattens the result
    into two parallel lists suitable for chart rendering.

    Args:
        connection: OpenStack connection; unused here, accepted for signature
            parity with the other collectors in this module.

    Returns:
        dict with two keys:
            ``host_labels`` -- list of host names (empty if no data),
            ``cpu_current`` -- list of CPU-usage values aligned with the hosts.
    """
    frame = convert_cpu_data(data=query_prometheus(PROMETHEUS_METRICS['cpu_usage']))
    if frame.empty:
        # No samples came back from Prometheus -- keep both lists empty so
        # callers can JSON-serialize the result without special-casing.
        hosts, usage = [], []
    else:
        hosts = frame['host'].to_list()
        usage = frame['cpu_usage'].to_list()
    return {"host_labels": hosts, "cpu_current": usage}
def get_audits(connection: Connection) -> list[dict] | None:
session = connection.session

View File

@@ -1,7 +1,9 @@
"""Tests for dashboard.openstack_utils.audits."""
from unittest.mock import MagicMock, patch
from django.test import TestCase
from dashboard.openstack_utils.audits import convert_cpu_data
from dashboard.openstack_utils.audits import convert_cpu_data, get_current_cluster_cpu
class ConvertCpuDataTest(TestCase):
@@ -41,3 +43,29 @@ class ConvertCpuDataTest(TestCase):
self.assertIn("host", result.columns)
self.assertIn("cpu_usage", result.columns)
self.assertEqual(len(result), 0)
class GetCurrentClusterCpuTest(TestCase):
    """Tests for get_current_cluster_cpu."""

    def test_returns_empty_lists_when_no_data(self):
        # An empty Prometheus response must produce empty label/value lists.
        with patch(
            "dashboard.openstack_utils.audits.query_prometheus",
            return_value=[],
        ):
            result = get_current_cluster_cpu(MagicMock())
        self.assertEqual(result["host_labels"], [])
        self.assertEqual(result["cpu_current"], [])

    def test_returns_host_labels_and_cpu_current(self):
        import pandas as pd

        frame = pd.DataFrame({
            "host": ["compute-0", "compute-1"],
            "cpu_usage": [25.0, 35.0],
        })
        # Stub both the raw Prometheus query and its DataFrame conversion so
        # only the list-flattening logic of the function under test runs.
        with patch(
            "dashboard.openstack_utils.audits.query_prometheus",
            return_value=[{"metric": {"host": "h0"}, "values": [[0, "1.0"]]}],
        ), patch(
            "dashboard.openstack_utils.audits.convert_cpu_data",
            return_value=frame,
        ):
            result = get_current_cluster_cpu(MagicMock())
        self.assertEqual(result["host_labels"], ["compute-0", "compute-1"])
        self.assertEqual(result["cpu_current"], [25.0, 35.0])

View File

@@ -11,7 +11,7 @@ class GetMockContextTest(TestCase):
def test_returns_all_top_level_keys(self):
ctx = get_mock_context()
expected_keys = {"region", "pcpu", "vcpu", "pram", "vram", "vm", "flavors", "audits"}
expected_keys = {"region", "pcpu", "vcpu", "pram", "vram", "vm", "flavors", "audits", "current_cluster"}
self.assertEqual(set(ctx.keys()), expected_keys)
def test_region_structure(self):

View File

@@ -74,14 +74,16 @@ class CollectContextTest(TestCase):
conn._compute_region = region_name
return conn
@patch("dashboard.views.get_current_cluster_cpu")
@patch("dashboard.views._fetch_prometheus_metrics")
@patch("dashboard.views.get_audits")
@patch("dashboard.views.get_flavor_list")
@patch("dashboard.views.get_connection")
def test_collect_context_structure_and_calculation(
self, mock_get_connection, mock_get_flavor_list, mock_get_audits, mock_fetch_metrics
self, mock_get_connection, mock_get_flavor_list, mock_get_audits, mock_fetch_metrics, mock_get_current_cluster_cpu
):
mock_get_connection.return_value = self._make_mock_connection("my-region")
mock_get_current_cluster_cpu.return_value = {"host_labels": ["h0", "h1"], "cpu_current": [30.0, 40.0]}
mock_get_flavor_list.return_value = {
"first_common_flavor": {"name": "m1.small", "count": 5},
"second_common_flavor": {"name": "", "count": 0},
@@ -124,6 +126,9 @@ class CollectContextTest(TestCase):
import json
self.assertIsInstance(context["audits"][0]["migrations"], str)
self.assertEqual(json.loads(context["audits"][0]["host_labels"]), ["h0", "h1"])
self.assertIn("current_cluster", context)
self.assertEqual(json.loads(context["current_cluster"]["host_labels"]), ["h0", "h1"])
self.assertEqual(json.loads(context["current_cluster"]["cpu_current"]), [30.0, 40.0])
class ApiStatsTest(TestCase):
@@ -196,10 +201,11 @@ class ApiAuditsTest(TestCase):
def setUp(self):
self.factory = RequestFactory()
@patch("dashboard.views.get_current_cluster_cpu")
@patch("dashboard.views.get_audits")
@patch("dashboard.views.get_connection")
def test_api_audits_returns_json_audits_list(
self, mock_get_connection, mock_get_audits
self, mock_get_connection, mock_get_audits, mock_get_current_cluster_cpu
):
mock_get_connection.return_value = MagicMock()
mock_get_audits.return_value = [
@@ -218,6 +224,7 @@ class ApiAuditsTest(TestCase):
"cpu_projected": [35.0, 35.0],
}
]
mock_get_current_cluster_cpu.return_value = {"host_labels": ["h0", "h1"], "cpu_current": [30.0, 40.0]}
cache.clear()
request = self.factory.get("/api/audits/")
with patch("dashboard.views.settings") as mock_settings:
@@ -231,15 +238,24 @@ class ApiAuditsTest(TestCase):
self.assertEqual(data["audits"][0]["name"], "Test Audit")
self.assertIsInstance(data["audits"][0]["migrations"], str)
self.assertIsInstance(data["audits"][0]["host_labels"], str)
self.assertIn("current_cluster", data)
self.assertEqual(data["current_cluster"]["host_labels"], ["h0", "h1"])
self.assertEqual(data["current_cluster"]["cpu_current"], [30.0, 40.0])
@patch("dashboard.views.get_current_cluster_cpu")
@patch("dashboard.views.collect_audits")
@patch("dashboard.views.settings")
def test_api_audits_uses_cache(self, mock_settings, mock_collect_audits):
def test_api_audits_uses_cache(self, mock_settings, mock_collect_audits, mock_get_current_cluster_cpu):
mock_settings.DASHBOARD_CACHE_TTL = 120
cached = [{"id": "cached-1", "name": "Cached Audit", "migrations": "[]", "host_labels": "[]", "cpu_current": "[]", "cpu_projected": "[]"}]
cached_audits = [{"id": "cached-1", "name": "Cached Audit", "migrations": "[]", "host_labels": "[]", "cpu_current": "[]", "cpu_projected": "[]"}]
cached_cluster = {"host_labels": ["cached-h0"], "cpu_current": [10.0]}
cache.clear()
cache.set("dashboard_audits", cached, timeout=120)
cache.set("dashboard_audits", cached_audits, timeout=120)
cache.set("dashboard_current_cluster", cached_cluster, timeout=120)
request = self.factory.get("/api/audits/")
response = api_audits(request)
mock_collect_audits.assert_not_called()
self.assertEqual(json.loads(response.content)["audits"][0]["name"], "Cached Audit")
mock_get_current_cluster_cpu.assert_not_called()
data = json.loads(response.content)
self.assertEqual(data["audits"][0]["name"], "Cached Audit")
self.assertEqual(data["current_cluster"], cached_cluster)

View File

@@ -8,7 +8,7 @@ from django.shortcuts import render
from dashboard.openstack_utils.connect import get_connection
from dashboard.openstack_utils.flavor import get_flavor_list
from dashboard.prometheus_utils.query import query_prometheus
from dashboard.openstack_utils.audits import get_audits
from dashboard.openstack_utils.audits import get_audits, get_current_cluster_cpu
from dashboard.mock_data import get_mock_context
# Prometheus queries run in parallel (query_key -> query string)
@@ -122,6 +122,11 @@ def collect_context():
"flavors": flavors,
"audits": audits,
}
current_cluster = get_current_cluster_cpu(connection)
context["current_cluster"] = {
"host_labels": json.dumps(current_cluster["host_labels"]),
"cpu_current": json.dumps(current_cluster["cpu_current"]),
}
# Serialize audit list fields for JavaScript so cached context is render-ready
for audit in context["audits"]:
audit["migrations"] = json.dumps(audit["migrations"])
@@ -221,6 +226,10 @@ def _skeleton_context():
"vm": {"count": 0, "active": 0, "stopped": 0, "avg_cpu": 0, "avg_ram": 0, "density": 0},
"flavors": empty_flavors,
"audits": [],
"current_cluster": {
"host_labels": "[]",
"cpu_current": "[]",
},
}
@@ -243,10 +252,16 @@ def api_stats(request):
def api_audits(request):
cache_key = "dashboard_audits"
cache_key_audits = "dashboard_audits"
cache_key_cluster = "dashboard_current_cluster"
cache_ttl = getattr(settings, "DASHBOARD_CACHE_TTL", 120)
audits = cache.get(cache_key)
audits = cache.get(cache_key_audits)
current_cluster = cache.get(cache_key_cluster)
if audits is None:
audits = collect_audits()
cache.set(cache_key, audits, timeout=cache_ttl)
return JsonResponse({"audits": audits})
cache.set(cache_key_audits, audits, timeout=cache_ttl)
if current_cluster is None:
connection = get_connection()
current_cluster = get_current_cluster_cpu(connection)
cache.set(cache_key_cluster, current_cluster, timeout=cache_ttl)
return JsonResponse({"audits": audits, "current_cluster": current_cluster})

10
package-lock.json generated
View File

@@ -12,6 +12,7 @@
"@tailwindcss/cli": "^4.1.17"
},
"devDependencies": {
"@fontsource/dm-sans": "^5.2.8",
"@tailwindcss/typography": "^0.5.19",
"autoprefixer": "^10.4.22",
"daisyui": "^5.5.5",
@@ -19,6 +20,15 @@
"tailwindcss": "^4.1.17"
}
},
"node_modules/@fontsource/dm-sans": {
"version": "5.2.8",
"resolved": "https://registry.npmjs.org/@fontsource/dm-sans/-/dm-sans-5.2.8.tgz",
"integrity": "sha512-tlovG42m9ESG28WiHpLq3F5umAlm64rv0RkqTbYowRn70e9OlRr5a3yTJhrhrY+k5lftR/OFJjPzOLQzk8EfCA==",
"dev": true,
"funding": {
"url": "https://github.com/sponsors/ayuhito"
}
},
"node_modules/@jridgewell/gen-mapping": {
"version": "0.3.13",
"resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz",

View File

@@ -16,6 +16,7 @@
"license": "ISC",
"type": "commonjs",
"devDependencies": {
"@fontsource/dm-sans": "^5.2.8",
"@tailwindcss/typography": "^0.5.19",
"autoprefixer": "^10.4.22",
"daisyui": "^5.5.5",

View File

@@ -1,3 +1,61 @@
/* DM Sans local webfonts (no external requests) */
/* Upright weights 400/500/600/700, then the matching italics.
   Each face uses font-display: swap so text renders immediately with a
   fallback font and swaps in DM Sans once the .woff2 file loads. */
@font-face {
font-family: 'DM Sans';
font-style: normal;
font-display: swap;
font-weight: 400;
src: url('../fonts/dm-sans-latin-400-normal.woff2') format('woff2');
}
@font-face {
font-family: 'DM Sans';
font-style: normal;
font-display: swap;
font-weight: 500;
src: url('../fonts/dm-sans-latin-500-normal.woff2') format('woff2');
}
@font-face {
font-family: 'DM Sans';
font-style: normal;
font-display: swap;
font-weight: 600;
src: url('../fonts/dm-sans-latin-600-normal.woff2') format('woff2');
}
@font-face {
font-family: 'DM Sans';
font-style: normal;
font-display: swap;
font-weight: 700;
src: url('../fonts/dm-sans-latin-700-normal.woff2') format('woff2');
}
/* Italic variants (same weights). */
@font-face {
font-family: 'DM Sans';
font-style: italic;
font-display: swap;
font-weight: 400;
src: url('../fonts/dm-sans-latin-400-italic.woff2') format('woff2');
}
@font-face {
font-family: 'DM Sans';
font-style: italic;
font-display: swap;
font-weight: 500;
src: url('../fonts/dm-sans-latin-500-italic.woff2') format('woff2');
}
@font-face {
font-family: 'DM Sans';
font-style: italic;
font-display: swap;
font-weight: 600;
src: url('../fonts/dm-sans-latin-600-italic.woff2') format('woff2');
}
@font-face {
font-family: 'DM Sans';
font-style: italic;
font-display: swap;
font-weight: 700;
src: url('../fonts/dm-sans-latin-700-italic.woff2') format('woff2');
}
@import "tailwindcss";
@plugin "daisyui";

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@@ -6,9 +6,6 @@
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>{% block title %}SWatcher{% endblock %}</title>
<link rel="icon" href="{% static 'favicon.ico' %}" type="image/x-icon">
<link rel="preconnect" href="https://fonts.googleapis.com">
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
<link href="https://fonts.googleapis.com/css2?family=DM+Sans:ital,opsz,wght@0,9..40,100..1000;1,9..40,100..1000&display=swap" rel="stylesheet">
<link rel="stylesheet" href="{% static 'css/output.css' %}">
<script src="{% static 'js/html2canvas-pro.min.js' %}"></script>
<script src="{% static 'js/jspdf.umd.min.js' %}"></script>

View File

@@ -443,6 +443,7 @@
{% block script %}
<script>
const SKELETON_MODE = {{ skeleton|yesno:"true,false" }};
const CURRENT_CLUSTER = {% if current_cluster %}{ "host_labels": {{ current_cluster.host_labels|safe }}, "cpu_current": {{ current_cluster.cpu_current|safe }} }{% else %}null{% endif %};
let auditData = {
{% if not skeleton %}
@@ -826,6 +827,19 @@
]).then(function(results) {
renderStats(results[0]);
renderAudits(results[1].audits);
if (!results[1].audits || results[1].audits.length === 0) {
var cc = results[1].current_cluster;
if (cc && cc.host_labels && cc.cpu_current && cc.host_labels.length) {
auditData["current"] = {
hostData: {
labels: cc.host_labels,
current: cc.cpu_current,
projected: cc.cpu_current
}
};
updateCPUCharts('current');
}
}
}).catch(function(err) {
var msg = err.status ? 'Failed to load data (' + err.status + ')' : 'Failed to load data';
var countEl = document.getElementById('auditsCount');
@@ -838,6 +852,15 @@
if (initialAudit && auditData[initialAudit]) {
document.getElementById('auditSelector').dispatchEvent(new Event('change'));
loadSelectedAudit();
} else if (!initialAudit && CURRENT_CLUSTER && CURRENT_CLUSTER.host_labels && CURRENT_CLUSTER.host_labels.length) {
auditData["current"] = {
hostData: {
labels: CURRENT_CLUSTER.host_labels,
current: CURRENT_CLUSTER.cpu_current,
projected: CURRENT_CLUSTER.cpu_current
}
};
updateCPUCharts('current');
}
}
});