"""Dashboard views: the index page plus JSON API endpoints for stats, audits,
and data-source status. Audit and cluster data are serialized via
``dashboard.serializers`` so API responses and template rendering stay consistent."""
from django.conf import settings
from django.core.cache import cache
from django.http import JsonResponse
from django.shortcuts import render

from dashboard.mock_data import get_mock_context
from dashboard.openstack_utils.audits import get_audits, get_current_cluster_cpu
from dashboard.openstack_utils.connect import check_openstack, get_connection
from dashboard.openstack_utils.flavor import get_flavor_list
from dashboard.prometheus_utils.query import check_prometheus, fetch_dashboard_metrics
from dashboard.serializers import (
    serialize_audit_for_response,
    serialize_current_cluster_for_template,
)
from dashboard.stats import (
    CACHE_KEY_AUDITS,
    CACHE_KEY_CURRENT_CLUSTER,
    CACHE_KEY_SOURCE_STATUS,
    CACHE_KEY_STATS,
    EMPTY_FLAVORS,
    build_stats,
)

def _empty_metrics():
|
|
"""Metrics dict with zero/default values for skeleton context."""
|
|
return {
|
|
"hosts_total": 0,
|
|
"pcpu_total": 0,
|
|
"pcpu_usage": 0,
|
|
"vcpu_allocated": 0,
|
|
"vcpu_overcommit_max": 0,
|
|
"pram_total": 0,
|
|
"pram_usage": 0,
|
|
"vram_allocated": 0,
|
|
"vram_overcommit_max": 0,
|
|
"vm_count": 0,
|
|
"vm_active": 0,
|
|
}
|
|
|
|
|
|
def collect_context():
    """Assemble the full page context: stats, serialized audits, cluster CPU data."""
    conn = get_connection()
    region = conn._compute_region
    flavors = get_flavor_list(connection=conn)
    serialized_audits = [
        serialize_audit_for_response(audit) for audit in get_audits(connection=conn)
    ]
    context = build_stats(fetch_dashboard_metrics(), region, flavors)
    context["audits"] = serialized_audits
    context["current_cluster"] = serialize_current_cluster_for_template(
        get_current_cluster_cpu(conn)
    )
    return context


def collect_stats():
    """Build stats dict: region, pcpu, pram, vcpu, vram, vm, flavors (no audits)."""
    conn = get_connection()
    region = conn._compute_region
    flavor_data = get_flavor_list(connection=conn)
    metric_data = fetch_dashboard_metrics()
    return build_stats(metric_data, region, flavor_data)


def collect_audits():
    """Build audits list with serialized fields for frontend."""
    raw_audits = get_audits(connection=get_connection())
    return list(map(serialize_audit_for_response, raw_audits))


def _skeleton_context():
    """Minimal context for skeleton-only index render."""
    context = build_stats(_empty_metrics(), "—", EMPTY_FLAVORS)
    # Empty placeholders; real data arrives later via the JSON API endpoints.
    context.update(
        skeleton=True,
        audits=[],
        current_cluster={"host_labels": "[]", "cpu_current": "[]"},
    )
    return context


def index(request):
    """Render the dashboard page; serve mock data when USE_MOCK_DATA is set."""
    use_mock = getattr(settings, "USE_MOCK_DATA", False)
    context = get_mock_context() if use_mock else _skeleton_context()
    return render(request, "index.html", context)


def api_stats(request):
    """Serve the stats payload as JSON, recomputing it on cache miss."""
    payload = cache.get(CACHE_KEY_STATS)
    if payload is None:
        payload = collect_stats()
        ttl = getattr(settings, "DASHBOARD_CACHE_TTL", 120)
        cache.set(CACHE_KEY_STATS, payload, timeout=ttl)
    return JsonResponse(payload)


def api_audits(request):
    """Serve audits plus current-cluster CPU data as JSON, caching each piece."""
    ttl = getattr(settings, "DASHBOARD_CACHE_TTL", 120)

    audits = cache.get(CACHE_KEY_AUDITS)
    if audits is None:
        audits = collect_audits()
        cache.set(CACHE_KEY_AUDITS, audits, timeout=ttl)

    cluster = cache.get(CACHE_KEY_CURRENT_CLUSTER)
    if cluster is None:
        # NOTE(review): unlike collect_context(), the value is cached and
        # returned without serialize_current_cluster_for_template — confirm
        # the API consumer expects the unserialized shape.
        cluster = get_current_cluster_cpu(get_connection())
        cache.set(CACHE_KEY_CURRENT_CLUSTER, cluster, timeout=ttl)

    return JsonResponse({"audits": audits, "current_cluster": cluster})


def api_source_status(request):
    """Return status of Prometheus and OpenStack data sources (ok / error / mock)."""
    if getattr(settings, "USE_MOCK_DATA", False):
        return JsonResponse(
            {
                "prometheus": {"status": "mock"},
                "openstack": {"status": "mock"},
            }
        )

    status = cache.get(CACHE_KEY_SOURCE_STATUS)
    if status is None:
        status = {
            "prometheus": check_prometheus(),
            "openstack": check_openstack(),
        }
        ttl = getattr(settings, "SOURCE_STATUS_CACHE_TTL", 30)
        cache.set(CACHE_KEY_SOURCE_STATUS, status, timeout=ttl)
    return JsonResponse(status)