Refactor Docker setup and add mock data support

- Updated .dockerignore and .gitignore for better file management.
- Introduced .env.example for environment variable configuration.
- Added docker-compose.dev.yml for development with mock data and live reload.
- Enhanced Dockerfile to include necessary dependencies and entrypoint script.
- Created mock_data.py to provide sample data for testing without OpenStack/Prometheus.
- Added unit tests for template filters in dashboard.
- Cleaned up various files for consistency and improved readability.
This commit is contained in:
2026-02-06 16:12:21 +03:00
parent d197d1e5e2
commit 57a2933f28
33 changed files with 3319 additions and 3050 deletions

View File

@@ -1,23 +1,23 @@
.git .git
.gitignore .gitignore
__pycache__ __pycache__
*.pyc *.pyc
*.pyo *.pyo
*.pyd *.pyd
.env .env
.venv .venv
venv/ venv/
env/ env/
.vscode .vscode
.idea .idea
*.log *.log
*.sqlite3 *.sqlite3
static/ media/
media/ node_modules/
node_modules/ npm-debug.log*
npm-debug.log* yarn-debug.log*
yarn-debug.log* yarn-error.log*
yarn-error.log* Dockerfile
Dockerfile docker-compose.yml
docker-compose.yml README.md
README.md clouds.yaml

9
.env.example Normal file
View File

@@ -0,0 +1,9 @@
# Optional: copy to .env and set for your environment.
# For docker-compose, add to docker-compose.yml: env_file: [.env]
# PYTHONUNBUFFERED=1
# USE_MOCK_DATA=false
# PROMETHEUS_URL=http://127.0.0.1:1234/
# OPENSTACK_CLOUD=distlab
# OPENSTACK_REGION_NAME=cl2k1distlab
# SECRET_KEY=your-secret-key

54
.gitignore vendored
View File

@@ -1,26 +1,28 @@
# Python # Python
*.pyc *.pyc
__pycache__/ __pycache__/
venv/ venv/
.env .env
db.sqlite3 db.sqlite3
# Django # Django
media/ media/
staticfiles/ staticfiles/
# Compiled CSS # Compiled CSS
watcher_visio/static/css/output.css watcher_visio/static/css/output.css
watcher_visio/static/css/tailwindcss watcher_visio/static/css/tailwindcss
watcher_visio/static/css/tailwindcss.exe watcher_visio/static/css/tailwindcss.exe
# IDE # IDE
.vscode/ .vscode/
.idea/ .idea/
# DaisyUI # DaisyUI
static/css/output.css static/css/output.css
static/css/tailwindcss static/css/tailwindcss
# NodeJS # NodeJS
node_modules node_modules
clouds.yaml

View File

@@ -1,32 +1,37 @@
FROM alpine:3 AS build FROM alpine:3 AS build
RUN apk update && \ RUN apk update && \
apk add --no-cache --virtual .build-deps \ apk add --no-cache --virtual .build-deps \
ca-certificates gcc postgresql-dev linux-headers musl-dev \ ca-certificates gcc postgresql-dev linux-headers musl-dev \
libffi-dev jpeg-dev zlib-dev \ libffi-dev jpeg-dev zlib-dev \
git bash build-base python3-dev git bash build-base python3-dev \
dos2unix
RUN python3 -m venv /venv
ENV PATH "/venv/bin:$PATH" RUN python3 -m venv /venv
COPY ./requirements.txt / ENV PATH "/venv/bin:$PATH"
RUN pip install -r /requirements.txt COPY ./requirements.txt /
RUN pip install -r /requirements.txt
FROM alpine:3 COPY ./docker-entrypoint.sh /docker-entrypoint.sh
RUN dos2unix /docker-entrypoint.sh && \
ENV LANG C.UTF-8 chmod +x /docker-entrypoint.sh
ENV LC_ALL C.UTF-8
ENV PYTHONUNBUFFERED 1 FROM alpine:3
ENV PATH "/venv/bin:$PATH"
ENV LANG C.UTF-8
RUN apk add --no-cache --update python3 ENV LC_ALL C.UTF-8
COPY --from=build /venv /venv ENV PYTHONUNBUFFERED 1
ENV PATH "/venv/bin:$PATH"
RUN mkdir /app
WORKDIR /app RUN apk add --no-cache --update python3 curl
COPY ./ /
COPY --from=build /venv /venv
ENTRYPOINT ["/docker-entrypoint.sh"] COPY --from=build /docker-entrypoint.sh /docker-entrypoint.sh
CMD [ "python", "manage.py", "runserver", "0.0.0.0:8000" ]
WORKDIR /app
COPY . /app
ENTRYPOINT ["/docker-entrypoint.sh"]
CMD ["python3", "manage.py", "runserver", "0.0.0.0:8000"]

115
dashboard/mock_data.py Normal file
View File

@@ -0,0 +1,115 @@
"""Mock context for dashboard when USE_MOCK_DATA is enabled (no OpenStack/Prometheus)."""
import json
def get_mock_context():
    """Build a static, render-ready context mirroring collect_context().

    Used when USE_MOCK_DATA is enabled so the dashboard can be exercised
    without a live OpenStack or Prometheus backend.  The list-valued audit
    fields are pre-serialized to JSON strings, ready for embedding in JS.
    """
    GIB = 1024 ** 3  # bytes per gibibyte

    # Cluster-wide physical/virtual capacity figures for the fake region.
    hosts_total = 6
    pcpu_total = 48
    pcpu_usage = 12.5
    vcpu_allocated = 96
    vcpu_overcommit_max = 2.0
    pram_total = 256 * GIB
    pram_usage = 120 * GIB
    vram_allocated = 192 * GIB
    vram_overcommit_max = 1.5
    vm_count = 24
    vm_active = 22

    # Virtual capacity is physical capacity scaled by the overcommit cap.
    vcpu_total = pcpu_total * vcpu_overcommit_max
    vram_total = pram_total * vram_overcommit_max

    # Chart series shared by both sample audits.
    host_labels = ["compute-0", "compute-1", "compute-2", "compute-3", "compute-4", "compute-5"]
    cpu_current = [45.2, 38.1, 52.0, 41.3, 29.8, 48.5]
    cpu_projected = [42.0, 40.0, 48.0, 44.0, 35.0, 46.0]

    def make_audit(uid, name, created_at, strategy, goal, migrations, projected):
        # Assemble one audit entry; list fields are JSON-encoded for JS.
        return {
            "id": uid,
            "name": name,
            "created_at": created_at,
            "strategy": strategy,
            "goal": goal,
            "type": "ONESHOT",
            "scope": "Full Cluster",
            "cpu_weight": "1.0",
            "ram_weight": "1.0",
            "migrations": json.dumps(migrations),
            "host_labels": json.dumps(host_labels),
            "cpu_current": json.dumps(cpu_current),
            "cpu_projected": json.dumps(projected),
        }

    audits = [
        make_audit(
            "mock-audit-uuid-1",
            "Mock audit (balanced)",
            "2025-02-01T10:00:00",
            "Balanced",
            "BALANCED",
            [
                {
                    "instanceName": "instance-1",
                    "source": "compute-0",
                    "destination": "compute-3",
                    "flavor": "m1.small",
                    "impact": "Low",
                }
            ],
            cpu_projected,
        ),
        make_audit(
            "mock-audit-uuid-2",
            "Mock audit (workload consolidation)",
            "2025-02-02T14:30:00",
            "Workload consolidation",
            "WORKLOAD_CONSOLIDATION",
            [],
            [40.0, 42.0, 50.0, 43.0, 36.0, 45.0],
        ),
    ]

    region = {
        "name": "mock-region",
        "hosts_total": hosts_total,
    }
    pcpu = {
        "total": pcpu_total,
        "usage": pcpu_usage,
        "free": pcpu_total - pcpu_usage,
        "used_percentage": pcpu_usage / pcpu_total * 100,
    }
    vcpu = {
        "total": int(vcpu_total),
        "allocated": vcpu_allocated,
        "free": int(vcpu_total) - vcpu_allocated,
        "allocated_percentage": vcpu_allocated / vcpu_total * 100,
        "overcommit_ratio": vcpu_allocated / pcpu_total,
        "overcommit_max": vcpu_overcommit_max,
    }
    pram = {
        "total": pram_total,
        "usage": pram_usage,
        "free": pram_total - pram_usage,
        "used_percentage": pram_usage / pram_total * 100,
    }
    vram = {
        "total": vram_total,
        "allocated": vram_allocated,
        "free": vram_total - vram_allocated,
        "allocated_percentage": vram_allocated / vram_total * 100,
        "overcommit_ratio": vram_allocated / pram_total,
        "overcommit_max": vram_overcommit_max,
    }
    vm = {
        "count": vm_count,
        "active": vm_active,
        "stopped": vm_count - vm_active,
        "avg_cpu": vcpu_allocated / vm_count if vm_count else 0,
        "avg_ram": vram_allocated / vm_count if vm_count else 0,
        "density": vm_count / hosts_total,
    }
    flavors = {
        "first_common_flavor": {"name": "m1.small", "count": 12},
        "second_common_flavor": {"name": "m1.medium", "count": 8},
        "third_common_flavor": {"name": "m1.large", "count": 4},
    }

    return {
        "region": region,
        "pcpu": pcpu,
        "vcpu": vcpu,
        "pram": pram,
        "vram": vram,
        "vm": vm,
        "flavors": flavors,
        "audits": audits,
    }

View File

@@ -1,124 +1,126 @@
import pandas import pandas
from copy import copy from copy import copy
from openstack.connection import Connection from openstack.connection import Connection
from watcher_visio.settings import WATCHER_ENDPOINT_NAME, WATCHER_INTERFACE_NAME, PROMETHEUS_METRICS from watcher_visio.settings import WATCHER_ENDPOINT_NAME, WATCHER_INTERFACE_NAME, PROMETHEUS_METRICS
from dashboard.prometheus_utils.query import query_prometheus from dashboard.prometheus_utils.query import query_prometheus
def convert_cpu_data(data: list): def convert_cpu_data(data: list):
metrics = [] metrics = []
for entry in data: for entry in data:
for t, val in entry["values"]: for t, val in entry["values"]:
metrics.append({ metrics.append({
"timestamp": int(t), "timestamp": int(t),
"host": entry["metric"]["host"], "host": entry["metric"]["host"],
"cpu_usage": float(val), "cpu_usage": float(val),
"instance": entry["metric"]["instanceName"] "instance": entry["metric"]["instanceName"]
}) })
df_cpu = pandas.DataFrame(metrics) df_cpu = pandas.DataFrame(metrics)
df_cpu["timestamp"] = pandas.to_datetime(df_cpu["timestamp"], unit="s") df_cpu["timestamp"] = pandas.to_datetime(df_cpu["timestamp"], unit="s")
# Aggregate CPU usage per host # Aggregate CPU usage per host
return ( return (
df_cpu.groupby(["host", "timestamp"])["cpu_usage"].sum() df_cpu.groupby(["host", "timestamp"])["cpu_usage"].sum()
.groupby("host").mean() .groupby("host").mean()
.reset_index() .reset_index()
) )
def get_audits(connection: Connection) -> list[dict] | None: def get_audits(connection: Connection) -> list[dict] | None:
session = connection.session session = connection.session
watcher_endpoint = connection.endpoint_for( watcher_endpoint = connection.endpoint_for(
service_type=WATCHER_ENDPOINT_NAME, service_type=WATCHER_ENDPOINT_NAME,
interface=WATCHER_INTERFACE_NAME interface=WATCHER_INTERFACE_NAME
) )
# Collect instances prometheus metrics # Collect instances prometheus metrics
cpu_data = query_prometheus(PROMETHEUS_METRICS['cpu_usage']) cpu_data = query_prometheus(PROMETHEUS_METRICS['cpu_usage'])
cpu_metrics = convert_cpu_data(data=cpu_data) cpu_metrics = convert_cpu_data(data=cpu_data)
# Fetch audit list # Fetch audit list
audits_resp = session.get( audits_resp = session.get(
f"{watcher_endpoint}/v1/audits" f"{watcher_endpoint}/v1/audits"
) )
audits_resp.raise_for_status() audits_resp.raise_for_status()
audits_resp.json().get('audits') audits_resp = audits_resp.json().get('audits') or []
# Fetch action plan list # Fetch action plan list
actionplans_resp = session.get( actionplans_resp = session.get(
f"{watcher_endpoint}/v1/action_plans" f"{watcher_endpoint}/v1/action_plans"
) )
actionplans_resp.raise_for_status() actionplans_resp.raise_for_status()
actionplans_resp.json().get('action_plans') actionplans_resp = actionplans_resp.json().get('action_plans') or []
# Filtering audits by PENDING state # Filtering audits by PENDING state
pending_audits = [audit for audit in audits_resp if audit['state'] == "PENDING"] pending_audits = [plan for plan in actionplans_resp if plan['state'] == "RECOMMENDED"]
result = [] result = []
for item in pending_audits: for item in pending_audits:
projected_cpu_data = copy(cpu_data) projected_cpu_data = copy(cpu_data)
audit_resp = session.get( audit_resp = session.get(
f"{watcher_endpoint}/v1/audits/{item['uuid']}" f"{watcher_endpoint}/v1/audits/{item['audit_uuid']}"
) )
audit_resp.raise_for_status() audit_resp.raise_for_status()
audit_resp = audit_resp.json() audit_resp = audit_resp.json()
actionplan = next(filter(lambda x: x.get("audit_uuid") == audit_resp['uuid'], actionplans_resp), None) actionplan = next(filter(lambda x: x.get("audit_uuid") == audit_resp['uuid'], actionplans_resp), None)
if actionplan is None:
actions_resp = session.get( continue
f"{watcher_endpoint}/v1/actions/?action_plan_uuid={actionplan['uuid']}"
) actions_resp = session.get(
actions_resp.raise_for_status() f"{watcher_endpoint}/v1/actions/?action_plan_uuid={actionplan['uuid']}"
actions_resp = actions_resp.json().get('actions') )
actions_resp.raise_for_status()
migrations = [] actions_resp = actions_resp.json().get('actions') or []
mapping = {}
for action in actions_resp: migrations = []
action_resp = session.get( mapping = {}
f"{watcher_endpoint}/v1/actions/{action['uuid']}" for action in actions_resp:
) action_resp = session.get(
action_resp.raise_for_status() f"{watcher_endpoint}/v1/actions/{action['uuid']}"
action_resp = action_resp.json() )
action_resp.raise_for_status()
server = connection.get_server_by_id(action['input_parameters']['resource_id']) action_resp = action_resp.json()
params = action_resp['input_parameters']
mapping[params['resource_name']] = params['destination_node'] server = connection.get_server_by_id(action_resp['input_parameters']['resource_id'])
params = action_resp['input_parameters']
migrations.append({ mapping[params['resource_name']] = params['destination_node']
"instanceName": action['input_parameters']['resource_name'],
"source": action['input_parameters']['source_node'], migrations.append({
"destination": action['input_parameters']['destination_node'], "instanceName": action_resp['input_parameters']['resource_name'],
"flavor": server.flavor.name, "source": action_resp['input_parameters']['source_node'],
"impact": 'Low' "destination": action_resp['input_parameters']['destination_node'],
}) "flavor": server.flavor.name,
"impact": 'Low'
for entry in projected_cpu_data: })
if (instance := entry['metric']['instanceName']) in mapping:
entry['metric']['host'] = mapping[instance] for entry in projected_cpu_data:
if (instance := entry['metric']['instanceName']) in mapping:
projected_cpu_metrics = convert_cpu_data(projected_cpu_data) entry['metric']['host'] = mapping[instance]
result.append({ projected_cpu_metrics = convert_cpu_data(projected_cpu_data)
"id": audit_resp['uuid'],
"name": audit_resp['name'], result.append({
"created_at": audit_resp['created_at'], "id": audit_resp['uuid'],
"strategy": audit_resp['strategy_name'], "name": audit_resp['name'],
"goal": audit_resp['goal_name'], "created_at": audit_resp['created_at'],
"type": audit_resp['audit_type'], "strategy": audit_resp['strategy_name'],
"scope": audit_resp['scope'], "goal": audit_resp['goal_name'],
"cpu_weight": audit_resp['parameters'].get('weights', {}).get('instance_cpu_usage_weight', "none"), "type": audit_resp['audit_type'],
"ram_weight": audit_resp['parameters'].get('weights', {}).get('instance_ram_usage_weight', "none"), "scope": audit_resp['scope'],
"migrations": migrations, "cpu_weight": audit_resp['parameters'].get('weights', {}).get('instance_cpu_usage_weight', "none"),
"host_labels": cpu_metrics['host'].to_list(), "ram_weight": audit_resp['parameters'].get('weights', {}).get('instance_ram_usage_weight', "none"),
"cpu_current": cpu_metrics['cpu_usage'].to_list(), "migrations": migrations,
"cpu_projected": projected_cpu_metrics['cpu_usage'].to_list(), "host_labels": cpu_metrics['host'].to_list(),
}) "cpu_current": cpu_metrics['cpu_usage'].to_list(),
"cpu_projected": projected_cpu_metrics['cpu_usage'].to_list(),
return result })
return result

View File

@@ -1,8 +1,8 @@
import openstack import openstack
from openstack.connection import Connection from openstack.connection import Connection
from watcher_visio.settings import OPENSTACK_CLOUD, OPENSTACK_REGION_NAME from watcher_visio.settings import OPENSTACK_CLOUD, OPENSTACK_REGION_NAME
def get_connection() -> Connection: def get_connection() -> Connection:
connection = openstack.connect(cloud=OPENSTACK_CLOUD, region_name=OPENSTACK_REGION_NAME) connection = openstack.connect(cloud=OPENSTACK_CLOUD, region_name=OPENSTACK_REGION_NAME)
return connection return connection

View File

@@ -1,20 +1,23 @@
from openstack.connection import Connection from openstack.connection import Connection
from collections import Counter from collections import Counter
def get_flavor_list(connection: Connection) -> dict: def get_flavor_list(connection: Connection) -> dict:
servers = list(connection.compute.servers(all_projects=True)) servers = list(connection.compute.servers(all_projects=True))
flavor_ids = [s.flavor['id'] for s in servers if 'id' in s.flavor] flavor_ids = [s.flavor['id'] for s in servers if 'id' in s.flavor]
flavor_count = Counter(flavor_ids).most_common() flavor_count = Counter(flavor_ids).most_common()
flavors = list(flavor_count) flavors = list(flavor_count)
result = {} result = {}
for idx, prefix in [(0, "first"), (1, "second"), (2, "third")]: placeholder = {"name": "", "count": 0}
if len(flavors) > idx: for idx, prefix in [(0, "first"), (1, "second"), (2, "third")]:
result[f"{prefix}_common_flavor"] = { if len(flavors) > idx:
"name": flavors[idx][0], result[f"{prefix}_common_flavor"] = {
"count": flavors[idx][1] "name": flavors[idx][0],
} "count": flavors[idx][1]
}
return result else:
result[f"{prefix}_common_flavor"] = placeholder
return result

View File

@@ -1,16 +1,16 @@
import requests import requests
from watcher_visio.settings import PROMETHEUS_URL from watcher_visio.settings import PROMETHEUS_URL
def query_prometheus(query: str) -> str | list[str]: def query_prometheus(query: str) -> str | list[str]:
url = f"{PROMETHEUS_URL}/api/v1/query" url = f"{PROMETHEUS_URL}/api/v1/query"
params = { params = {
"query": query, "query": query,
} }
response = requests.get(url=url, params=params) response = requests.get(url=url, params=params)
response.raise_for_status() response.raise_for_status()
result = response.json()["data"]["result"] result = response.json()["data"]["result"]
if len(result) > 1: if len(result) > 1:
return result[0]["value"][1] return result
else: else:
return result[0]["values"] return result[0]["value"][1]

View File

@@ -1,56 +1,56 @@
from django import template from django import template
register = template.Library() register = template.Library()
@register.filter @register.filter
def div(a, b): def div(a, b):
try: try:
return float(a) / float(b) return float(a) / float(b)
except: except:
return 0 return 0
@register.filter @register.filter
def mul(a, b): def mul(a, b):
try: try:
return float(a) * float(b) return float(a) * float(b)
except: except:
return 0 return 0
@register.filter @register.filter
def sub(a, b): def sub(a, b):
try: try:
return float(a) - float(b) return float(a) - float(b)
except: except:
return 0 return 0
@register.filter @register.filter
def convert_bytes(bytes_value, target_unit='GB'): def convert_bytes(bytes_value, target_unit='GB'):
""" """
Convert bytes to specific unit Convert bytes to specific unit
Args: Args:
bytes_value: Size in bytes bytes_value: Size in bytes
target_unit: Target unit ('B', 'KB', 'MB', 'GB', 'TB') target_unit: Target unit ('B', 'KB', 'MB', 'GB', 'TB')
precision: Number of decimal places precision: Number of decimal places
Returns: Returns:
Float value in target unit Float value in target unit
""" """
try: try:
bytes_value = float(bytes_value) bytes_value = float(bytes_value)
except (ValueError, TypeError): except (ValueError, TypeError):
return 0.0 return 0.0
conversion_factors = { conversion_factors = {
'B': 1, 'B': 1,
'KB': 1024, 'KB': 1024,
'MB': 1024 * 1024, 'MB': 1024 * 1024,
'GB': 1024 * 1024 * 1024, 'GB': 1024 * 1024 * 1024,
'TB': 1024 * 1024 * 1024 * 1024, 'TB': 1024 * 1024 * 1024 * 1024,
} }
target_unit = target_unit.upper() target_unit = target_unit.upper()
if target_unit not in conversion_factors: if target_unit not in conversion_factors:
target_unit = 'MB' target_unit = 'MB'
result = bytes_value / conversion_factors[target_unit] result = bytes_value / conversion_factors[target_unit]
return round(result, 1) return round(result, 1)

View File

@@ -0,0 +1 @@
# Dashboard test package

View File

@@ -0,0 +1,103 @@
"""Tests for dashboard.templatetags.mathfilters."""
from django.test import TestCase
from django.template import Template, Context
from dashboard.templatetags.mathfilters import div, mul, sub, convert_bytes
class DivFilterTest(TestCase):
    """Behaviour of the ``div`` template filter."""

    def test_div_normal(self):
        # Ordinary numeric division returns a float.
        for args, expected in [((10, 2), 5.0), ((10.0, 4), 2.5)]:
            with self.subTest(args=args):
                self.assertEqual(div(*args), expected)

    def test_div_by_zero(self):
        # The filter swallows the error and reports 0 instead of raising.
        self.assertEqual(div(10, 0), 0)

    def test_div_non_numeric(self):
        # Unconvertible operands also collapse to 0.
        for args in (("x", 2), (10, "y"), (None, 2)):
            with self.subTest(args=args):
                self.assertEqual(div(*args), 0)
class MulFilterTest(TestCase):
    """Behaviour of the ``mul`` template filter."""

    def test_mul_normal(self):
        # Ordinary numeric multiplication returns a float.
        for args, expected in [((3, 4), 12.0), ((2.5, 4), 10.0)]:
            with self.subTest(args=args):
                self.assertEqual(mul(*args), expected)

    def test_mul_non_numeric(self):
        # Unconvertible operands collapse to 0.
        for args in (("a", 2), (2, None)):
            with self.subTest(args=args):
                self.assertEqual(mul(*args), 0)
class SubFilterTest(TestCase):
    """Behaviour of the ``sub`` template filter."""

    def test_sub_normal(self):
        # Ordinary numeric subtraction returns a float.
        for args, expected in [((10, 3), 7.0), ((5.5, 2), 3.5)]:
            with self.subTest(args=args):
                self.assertEqual(sub(*args), expected)

    def test_sub_non_numeric(self):
        # Unconvertible operands collapse to 0.
        for args in (("x", 1), (5, "y")):
            with self.subTest(args=args):
                self.assertEqual(sub(*args), 0)
class ConvertBytesFilterTest(TestCase):
    """Behaviour of the ``convert_bytes`` template filter."""

    # Base-1024 unit step used by the filter's conversion table.
    KIB = 1024

    def test_convert_to_B(self):
        self.assertEqual(convert_bytes(self.KIB, "B"), 1024.0)

    def test_convert_to_KB(self):
        self.assertEqual(convert_bytes(2 * self.KIB, "KB"), 2.0)

    def test_convert_to_MB(self):
        self.assertEqual(convert_bytes(3 * self.KIB ** 2, "MB"), 3.0)

    def test_convert_to_GB(self):
        self.assertEqual(convert_bytes(5 * self.KIB ** 3, "GB"), 5.0)

    def test_convert_to_TB(self):
        self.assertEqual(convert_bytes(self.KIB ** 4, "TB"), 1.0)

    def test_convert_default_GB(self):
        # With no unit argument the filter defaults to GB.
        self.assertEqual(convert_bytes(2 * self.KIB ** 3), 2.0)

    def test_convert_invalid_unit_fallback_to_MB(self):
        # Unknown units silently fall back to MB.
        for bogus in ("invalid", "xyz"):
            with self.subTest(unit=bogus):
                self.assertEqual(convert_bytes(self.KIB ** 2, bogus), 1.0)

    def test_convert_non_numeric_returns_zero(self):
        for bad in ("abc", None):
            with self.subTest(value=bad):
                self.assertEqual(convert_bytes(bad), 0.0)

    def test_convert_rounds_to_one_decimal(self):
        # 1500 MiB is ~1.46 GiB, rounded to one decimal place.
        self.assertEqual(convert_bytes(1500 * self.KIB ** 2, "GB"), 1.5)
        self.assertEqual(convert_bytes(1536 * self.KIB ** 2, "GB"), 1.5)

    def test_convert_case_insensitive_unit(self):
        for unit in ("gb", "GB"):
            with self.subTest(unit=unit):
                self.assertEqual(convert_bytes(self.KIB ** 3, unit), 1.0)
class MathfiltersTemplateIntegrationTest(TestCase):
    """Exercise the filters through actual Django template rendering."""

    @staticmethod
    def _render(source, **context):
        # Render a template string against the given context variables.
        return Template(source).render(Context(context))

    def test_div_in_template(self):
        out = self._render("{% load mathfilters %}{{ a|div:b }}", a=10, b=2)
        self.assertEqual(out, "5.0")

    def test_mul_in_template(self):
        out = self._render("{% load mathfilters %}{{ a|mul:b }}", a=3, b=4)
        self.assertEqual(out, "12.0")

    def test_sub_in_template(self):
        out = self._render("{% load mathfilters %}{{ a|sub:b }}", a=10, b=3)
        self.assertEqual(out, "7.0")

    def test_convert_bytes_in_template(self):
        out = self._render("{% load mathfilters %}{{ bytes|convert_bytes:'GB' }}", bytes=1024 ** 3 * 2)
        self.assertEqual(out, "2.0")

View File

@@ -0,0 +1,107 @@
"""Tests for dashboard.mock_data."""
import json
from django.test import TestCase
from dashboard.mock_data import get_mock_context
class GetMockContextTest(TestCase):
    """Tests for get_mock_context()."""

    def setUp(self):
        # get_mock_context() is a pure function, so a fresh context per
        # test is cheap and keeps the tests independent.
        self.ctx = get_mock_context()

    def test_returns_all_top_level_keys(self):
        wanted = {"region", "pcpu", "vcpu", "pram", "vram", "vm", "flavors", "audits"}
        self.assertEqual(set(self.ctx.keys()), wanted)

    def test_region_structure(self):
        region = self.ctx["region"]
        for key in ("name", "hosts_total"):
            self.assertIn(key, region)
        self.assertEqual(region["name"], "mock-region")
        self.assertEqual(region["hosts_total"], 6)

    def test_pcpu_structure_and_types(self):
        pcpu = self.ctx["pcpu"]
        self.assertEqual(pcpu["total"], 48)
        self.assertEqual(pcpu["usage"], 12.5)
        self.assertEqual(pcpu["free"], 48 - 12.5)
        self.assertIsInstance(pcpu["used_percentage"], (int, float))

    def test_vcpu_structure(self):
        vcpu = self.ctx["vcpu"]
        for key in ("total", "allocated", "free", "allocated_percentage",
                    "overcommit_ratio", "overcommit_max"):
            self.assertIn(key, vcpu)
        self.assertEqual(vcpu["overcommit_max"], 2.0)

    def test_pram_vram_structure(self):
        pram = self.ctx["pram"]
        vram = self.ctx["vram"]
        for key in ("total", "usage", "free", "used_percentage"):
            self.assertIn(key, pram)
        for key in ("total", "allocated", "overcommit_max"):
            self.assertIn(key, vram)

    def test_vm_structure(self):
        vm = self.ctx["vm"]
        self.assertEqual(vm["count"], 24)
        self.assertEqual(vm["active"], 22)
        self.assertEqual(vm["stopped"], 2)
        for key in ("avg_cpu", "avg_ram", "density"):
            self.assertIn(key, vm)

    def test_flavors_structure(self):
        flavors = self.ctx["flavors"]
        for key in ("first_common_flavor", "second_common_flavor", "third_common_flavor"):
            self.assertIn(key, flavors)
            self.assertIn("name", flavors[key])
            self.assertIn("count", flavors[key])
        first = flavors["first_common_flavor"]
        self.assertEqual(first["name"], "m1.small")
        self.assertEqual(first["count"], 12)

    def test_audits_serialized_fields(self):
        audits = self.ctx["audits"]
        self.assertIsInstance(audits, list)
        self.assertGreaterEqual(len(audits), 1)
        serialized = ("migrations", "host_labels", "cpu_current", "cpu_projected")
        for audit in audits:
            for field in serialized:
                self.assertIn(field, audit)
                # Must be a JSON-encoded string (render-ready for JS)
                # that round-trips through json.loads without error.
                self.assertIsInstance(audit[field], str)
                json.loads(audit[field])

    def test_audits_metadata_fields(self):
        audit = self.ctx["audits"][0]
        for key in ("id", "name", "created_at", "strategy", "goal",
                    "type", "scope", "cpu_weight", "ram_weight"):
            self.assertIn(key, audit)

View File

@@ -1,6 +1,6 @@
from django.urls import path from django.urls import path
from . import views from . import views
urlpatterns = [ urlpatterns = [
path('', views.index, name='index'), path('', views.index, name='index'),
] ]

View File

@@ -1,332 +1,143 @@
import json import json
from concurrent.futures import ThreadPoolExecutor, as_completed
from django.shortcuts import render
from dashboard.openstack_utils.connect import get_connection from django.conf import settings
from dashboard.openstack_utils.flavor import get_flavor_list from django.core.cache import cache
from dashboard.prometheus_utils.query import query_prometheus from django.shortcuts import render
from dashboard.openstack_utils.audits import get_audits from dashboard.openstack_utils.connect import get_connection
from dashboard.openstack_utils.flavor import get_flavor_list
def collect_context(): from dashboard.prometheus_utils.query import query_prometheus
connection = get_connection() from dashboard.openstack_utils.audits import get_audits
region_name = connection._compute_region from dashboard.mock_data import get_mock_context
flavors = get_flavor_list(connection=connection)
audits = get_audits(connection=connection) # Prometheus queries run in parallel (query_key -> query string)
hosts_total = int( _PROMETHEUS_QUERIES = {
query_prometheus( "hosts_total": "count(node_exporter_build_info{job='node_exporter_compute'})",
query="count(node_exporter_build_info{job='node_exporter_compute'})" "pcpu_total": "sum(count(node_cpu_seconds_total{job='node_exporter_compute', mode='idle'}) without (cpu,mode))",
) "pcpu_usage": "sum(node_load5{job='node_exporter_compute'})",
) "vcpu_allocated": "sum(libvirt_domain_info_virtual_cpus)",
pcpu_total = int( "vcpu_overcommit_max": "avg(openstack_placement_resource_allocation_ratio{resourcetype='VCPU'})",
query_prometheus( "pram_total": "sum(node_memory_MemTotal_bytes{job='node_exporter_compute'})",
query="sum(count(node_cpu_seconds_total{job='node_exporter_compute', mode='idle'}) without (cpu,mode))" "pram_usage": "sum(node_memory_Active_bytes{job='node_exporter_compute'})",
) "vram_allocated": "sum(libvirt_domain_info_maximum_memory_bytes)",
) "vram_overcommit_max": "avg(avg_over_time(openstack_placement_resource_allocation_ratio{resourcetype='MEMORY_MB'}[5m]))",
pcpu_usage = float( "vm_count": "sum(libvirt_domain_state_code)",
query_prometheus( "vm_active": "sum(libvirt_domain_state_code{stateDesc='the domain is running'})",
query="sum(node_load5{job='node_exporter_compute'})" }
)
)
vcpu_allocated = int( def _fetch_prometheus_metrics():
query_prometheus( """Run all Prometheus queries in parallel and return a dict of name -> value."""
query="sum(libvirt_domain_info_virtual_cpus)" result = {}
) with ThreadPoolExecutor(max_workers=len(_PROMETHEUS_QUERIES)) as executor:
) future_to_key = {
vcpu_overcommit_max = float( executor.submit(query_prometheus, query=q): key
query_prometheus( for key, q in _PROMETHEUS_QUERIES.items()
query="avg(openstack_placement_resource_allocation_ratio{resourcetype='VCPU'})" }
) for future in as_completed(future_to_key):
) key = future_to_key[future]
pram_total = int( try:
query_prometheus( raw = future.result()
query="sum(node_memory_MemTotal_bytes{job='node_exporter_compute'})" # memory in bytes if key in ("pcpu_usage", "vcpu_overcommit_max", "vram_overcommit_max"):
) result[key] = float(raw)
) else:
pram_usage = int ( result[key] = int(raw)
query_prometheus( except (ValueError, TypeError):
query="sum(node_memory_Active_bytes{job='node_exporter_compute'})" result[key] = 0 if key in ("pcpu_usage", "vcpu_overcommit_max", "vram_overcommit_max") else 0
) return result
)
vram_allocated = int(
query_prometheus( def collect_context():
query="sum(libvirt_domain_info_maximum_memory_bytes)" connection = get_connection()
) region_name = connection._compute_region
) flavors = get_flavor_list(connection=connection)
vram_overcommit_max = float( audits = get_audits(connection=connection)
query_prometheus(
query="avg(avg_over_time(openstack_placement_resource_allocation_ratio{resourcetype='MEMORY_MB'}[5m]))" metrics = _fetch_prometheus_metrics()
) hosts_total = metrics.get("hosts_total") or 1
) pcpu_total = metrics.get("pcpu_total", 0)
vm_count = int( pcpu_usage = metrics.get("pcpu_usage", 0)
query_prometheus( vcpu_allocated = metrics.get("vcpu_allocated", 0)
query="sum(libvirt_domain_state_code)" vcpu_overcommit_max = metrics.get("vcpu_overcommit_max", 0)
) pram_total = metrics.get("pram_total", 0)
) pram_usage = metrics.get("pram_usage", 0)
vm_active = int( vram_allocated = metrics.get("vram_allocated", 0)
query_prometheus( vram_overcommit_max = metrics.get("vram_overcommit_max", 0)
query="sum(libvirt_domain_state_code{stateDesc='the domain is running'})" vm_count = metrics.get("vm_count", 0)
) vm_active = metrics.get("vm_active", 0)
)
vcpu_total = pcpu_total * vcpu_overcommit_max
vcpu_total = pcpu_total * vcpu_overcommit_max vram_total = pram_total * vram_overcommit_max
vram_total = pram_total * vram_overcommit_max
context = {
context = { # <--- Region data --->
# <--- Region data ---> "region": {
"region": { "name": region_name,
"name": region_name, "hosts_total": hosts_total,
"hosts_total": 6, },
}, # <--- CPU data --->
# <--- CPU data ---> # pCPU data
# pCPU data "pcpu": {
"pcpu": { "total": pcpu_total,
"total": pcpu_total, "usage": pcpu_usage,
"usage": pcpu_usage, "free": pcpu_total - pcpu_usage,
"free": pcpu_total - pcpu_usage, "used_percentage": (pcpu_usage / pcpu_total * 100) if pcpu_total else 0,
"used_percentage": pcpu_usage / pcpu_total * 100, },
}, # vCPU data
# vCPU data "vcpu": {
"vcpu": { "total": vcpu_total,
"total": vcpu_total, "allocated": vcpu_allocated,
"allocated": vcpu_allocated, "free": vcpu_total - vcpu_allocated,
"free": vcpu_total - vcpu_allocated, "allocated_percentage": (vcpu_allocated / vcpu_total * 100) if vcpu_total else 0,
"allocated_percentage": vcpu_allocated / vcpu_total * 100, "overcommit_ratio": (vcpu_allocated / pcpu_total) if pcpu_total else 0,
"overcommit_ratio": vcpu_allocated / pcpu_total, "overcommit_max": vcpu_overcommit_max,
"overcommit_max": vcpu_overcommit_max, },
}, # <--- RAM data --->
# <--- RAM data ---> # pRAM data
# pRAM data "pram": {
"pram" : { "total": pram_total,
"total": pram_total, "usage": pram_usage,
"usage": pram_usage, "free": pram_total - pram_usage,
"free": pram_total - pram_usage, "used_percentage": (pram_usage / pram_total * 100) if pram_total else 0,
"used_percentage": pram_usage / pram_total * 100, },
}, # vRAM data
# vRAM data "vram": {
"vram": { "total": vram_total,
"total": vram_total, "allocated": vram_allocated,
"allocated": vram_allocated, "free": vram_total - vram_allocated,
"free": vram_total - vram_allocated, "allocated_percentage": (vram_allocated / vram_total * 100) if vram_total else 0,
"allocated_percentage": vram_allocated / vram_total * 100, "overcommit_ratio": (vram_allocated / pram_total) if pram_total else 0,
"overcommit_ratio": vram_allocated / pram_total, "overcommit_max": vram_overcommit_max,
"overcommit_max": vram_overcommit_max, },
}, # <--- VM data --->
# <--- VM data ---> "vm": {
"vm": { "count": vm_count,
"count": vm_count, "active": vm_active,
"active": vm_active, "stopped": vm_count - vm_active,
"stopped": vm_count - vm_active, "avg_cpu": vcpu_allocated / vm_count if vm_count else 0,
"avg_cpu": vcpu_allocated / vm_count, "avg_ram": vram_allocated / vm_count if vm_count else 0,
"avg_ram": vram_allocated / vm_count, "density": vm_count / hosts_total if hosts_total else 0,
"density": vm_count / hosts_total, },
}, "flavors": flavors,
"flavors": flavors, "audits": audits,
"audits": audits, }
} # Serialize audit list fields for JavaScript so cached context is render-ready
return context for audit in context["audits"]:
audit["migrations"] = json.dumps(audit["migrations"])
def index(request): audit["host_labels"] = json.dumps(audit["host_labels"])
hosts_total = 6 audit["cpu_current"] = json.dumps(audit["cpu_current"])
pcpu_total = 672 audit["cpu_projected"] = json.dumps(audit["cpu_projected"])
pcpu_usage = 39.2 return context
vcpu_total = 3360
vcpu_allocated = 98 def index(request):
vcpu_overcommit_max = 5 if getattr(settings, "USE_MOCK_DATA", False):
pram_total = 562500000000 context = get_mock_context()
pram_usage = 4325000000 return render(request, "index.html", context)
vram_total = 489375000000
vram_allocated = 5625000000 cache_key = "dashboard_context"
vram_overcommit_max = 0.87 cache_ttl = getattr(settings, "DASHBOARD_CACHE_TTL", 120)
vm_count = 120 context = cache.get(cache_key)
vm_active = 90 if context is None:
context = { context = collect_context()
# <--- Region data ---> cache.set(cache_key, context, timeout=cache_ttl)
"region": { return render(request, "index.html", context)
"name": "ct3k1ldt",
"hosts_total": 6,
},
# <--- CPU data --->
# pCPU data
"pcpu": {
"total": pcpu_total,
"usage": pcpu_usage,
"free": pcpu_total - pcpu_usage,
"used_percentage": pcpu_usage / pcpu_total * 100,
},
# vCPU data
"vcpu": {
"total": vcpu_total,
"allocated": vcpu_allocated,
"free": vcpu_total - vcpu_allocated,
"allocated_percentage": vcpu_allocated / vcpu_total * 100,
"overcommit_ratio": vcpu_allocated / pcpu_total,
"overcommit_max": vcpu_overcommit_max,
},
# <--- RAM data --->
# pRAM data
"pram" : {
"total": pram_total,
"usage": pram_usage,
"free": pram_total - pram_usage,
"used_percentage": pram_usage / pram_total * 100,
},
# vRAM data
"vram": {
"total": vram_total,
"allocated": vram_allocated,
"free": vram_total - vram_allocated,
"allocated_percentage": vram_allocated / vram_total * 100,
"overcommit_ratio": vram_allocated / pram_total,
"overcommit_max": vram_overcommit_max,
},
# <--- VM data --->
"vm": {
"count": vm_count,
"active": vm_active,
"stopped": vm_count - vm_active,
"avg_cpu": vcpu_allocated / vm_count,
"avg_ram": vram_allocated / vm_count,
"density": vm_count / hosts_total,
},
"flavors": {
'first_common_flavor': {
'name': 'm1.medium',
'count': 18
},
'second_common_flavor': {
'name': 'm1.small',
'count': 12
},
'third_common_flavor': {
'name': 'm1.large',
'count': 8
},
},
# Audit data
'audits': [
{
'id': 'audit_001',
'name': 'Weekly Optimization',
'created_at': '2024-01-15',
'cpu_weight': 1.2,
'ram_weight': 0.6,
'scope': 'Full Cluster',
'strategy': 'Load Balancing',
'goal': 'Optimize CPU distribution across all hosts',
'migrations': [
{
'instanceName': 'web-server-01',
'source': 'compute-02',
'destination': 'compute-05',
'flavor': 'm1.medium',
'impact': 'Low'
},
{
'instanceName': 'db-replica-03',
'source': 'compute-01',
'destination': 'compute-04',
'flavor': 'm1.large',
'impact': 'Medium'
},
{
'instanceName': 'api-gateway',
'source': 'compute-03',
'destination': 'compute-06',
'flavor': 'm1.small',
'impact': 'Low'
},
{
'instanceName': 'cache-node-02',
'source': 'compute-01',
'destination': 'compute-07',
'flavor': 'm1.small',
'impact': 'Low'
},
{
'instanceName': 'monitoring-server',
'source': 'compute-04',
'destination': 'compute-02',
'flavor': 'm1.medium',
'impact': 'Low'
}
],
'host_labels': ['compute-01', 'compute-02', 'compute-03', 'compute-04', 'compute-05', 'compute-06', 'compute-07'],
'cpu_current': [78, 65, 42, 89, 34, 56, 71],
'cpu_projected': [65, 58, 45, 72, 48, 61, 68]
},
{
'id': 'audit_002',
'name': 'Emergency Rebalance',
'created_at': '2024-01-14',
'cpu_weight': 1.0,
'ram_weight': 1.0,
'scope': 'Overloaded Hosts',
'strategy': 'Hotspot Reduction',
'goal': 'Reduce load on compute-01 and compute-04',
'migrations': [
{
'instanceName': 'app-server-02',
'source': 'compute-01',
'destination': 'compute-06',
'flavor': 'm1.medium',
'impact': 'Medium'
},
{
'instanceName': 'file-server-01',
'source': 'compute-04',
'destination': 'compute-07',
'flavor': 'm1.large',
'impact': 'High'
}
],
'host_labels': ['compute-01', 'compute-02', 'compute-03', 'compute-04', 'compute-05', 'compute-06', 'compute-07'],
'cpu_current': [92, 65, 42, 85, 34, 56, 71],
'cpu_projected': [72, 65, 42, 65, 34, 66, 81]
},
{
'id': 'audit_003',
'name': 'Pre-Maintenance Planning',
'created_at': '2024-01-10',
'cpu_weight': 0.8,
'ram_weight': 1.5,
'scope': 'Maintenance Zone',
'strategy': 'Evacuation',
'goal': 'Empty compute-03 for maintenance',
'migrations': [
{
'instanceName': 'test-vm-01',
'source': 'compute-03',
'destination': 'compute-02',
'flavor': 'm1.small',
'impact': 'Low'
},
{
'instanceName': 'dev-server',
'source': 'compute-03',
'destination': 'compute-05',
'flavor': 'm1.medium',
'impact': 'Low'
},
{
'instanceName': 'staging-db',
'source': 'compute-03',
'destination': 'compute-07',
'flavor': 'm1.large',
'impact': 'High'
}
],
'host_labels': ['compute-01', 'compute-02', 'compute-03', 'compute-04', 'compute-05', 'compute-06', 'compute-07'],
'cpu_current': [78, 65, 56, 89, 34, 56, 71],
'cpu_projected': [78, 75, 0, 89, 54, 56, 81]
}
]
}
# Serialize lists for JavaScript
for audit in context['audits']:
audit['migrations'] = json.dumps(audit['migrations'])
audit['host_labels'] = json.dumps(audit['host_labels'])
audit['cpu_current'] = json.dumps(audit['cpu_current'])
audit['cpu_projected'] = json.dumps(audit['cpu_projected'])
return render(request, 'index.html', context)

22
docker-compose.dev.yml Normal file
View File

@@ -0,0 +1,22 @@
# Development override: use with
# docker compose -f docker-compose.yml -f docker-compose.dev.yml up --build
#
# Uses mock data (no OpenStack/Prometheus), mounts code for live reload.
services:
watcher-visio:
build:
context: .
dockerfile: Dockerfile
volumes:
- .:/app
environment:
- USE_MOCK_DATA=true
- DEBUG=true
- PYTHONUNBUFFERED=1
ports:
- "8000:8000"
# Optional: skip entrypoint migrations on every start for faster restart
# command: ["python3", "manage.py", "runserver", "0.0.0.0:8000"]
stdin_open: true
tty: true

View File

@@ -1,8 +1,24 @@
services: # Base compose: production-like run.
watcher-visio: # For development with mock data and live reload use:
build: . # docker compose -f docker-compose.yml -f docker-compose.dev.yml up --build
ports:
- "8000:8000" services:
volumes: watcher-visio:
- ./:/app image: watcher-visio:latest
restart: unless-stopped build:
context: .
dockerfile: Dockerfile
container_name: watcher-visio
ports:
- "8000:8000"
environment:
- PYTHONUNBUFFERED=1
# Override via environment or env_file (e.g. env_file: .env):
# PROMETHEUS_URL, OPENSTACK_CLOUD, OPENSTACK_REGION_NAME, SECRET_KEY
healthcheck:
test: ["CMD", "curl", "-f", "http://127.0.0.1:8000/"]
interval: 30s
timeout: 10s
retries: 3
start_period: 15s
restart: unless-stopped

View File

@@ -1,12 +1,11 @@
#!/bin/sh #!/bin/sh
set -e
set -e
echo "Applying database migrations..."
echo "Applying database migrations..." python3 manage.py migrate --noinput
python manage.py migrate --noinput
echo "Collecting static files..."
echo "Collecting static files..." python3 manage.py collectstatic --noinput
python manage.py collectstatic --noinput
echo "Starting Django application..."
echo "Starting Django application..."
exec "$@" exec "$@"

View File

@@ -1,22 +1,22 @@
#!/usr/bin/env python #!/usr/bin/env python
"""Django's command-line utility for administrative tasks.""" """Django's command-line utility for administrative tasks."""
import os import os
import sys import sys
def main(): def main():
"""Run administrative tasks.""" """Run administrative tasks."""
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'watcher_visio.settings') os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'watcher_visio.settings')
try: try:
from django.core.management import execute_from_command_line from django.core.management import execute_from_command_line
except ImportError as exc: except ImportError as exc:
raise ImportError( raise ImportError(
"Couldn't import Django. Are you sure it's installed and " "Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you " "available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?" "forget to activate a virtual environment?"
) from exc ) from exc
execute_from_command_line(sys.argv) execute_from_command_line(sys.argv)
if __name__ == '__main__': if __name__ == '__main__':
main() main()

2733
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,28 +1,28 @@
{ {
"name": "watcher-visio", "name": "watcher-visio",
"version": "1.0.0", "version": "1.0.0",
"description": "", "description": "",
"main": "index.js", "main": "index.js",
"scripts": { "scripts": {
"build": "npx @tailwindcss/cli -i ./static/css/main.css -o ./static/css/output.css --minify", "build": "npx @tailwindcss/cli -i ./static/css/main.css -o ./static/css/output.css --minify",
"dev": "npx @tailwindcss/cli -i ./static/css/main.css -o ./static/css/output.css --watch" "dev": "npx @tailwindcss/cli -i ./static/css/main.css -o ./static/css/output.css --watch"
}, },
"repository": { "repository": {
"type": "git", "type": "git",
"url": "https://git.arnike.ru/Arnike/watcher-visio.git" "url": "https://git.arnike.ru/Arnike/watcher-visio.git"
}, },
"keywords": [], "keywords": [],
"author": "", "author": "",
"license": "ISC", "license": "ISC",
"type": "commonjs", "type": "commonjs",
"devDependencies": { "devDependencies": {
"@tailwindcss/typography": "^0.5.19", "@tailwindcss/typography": "^0.5.19",
"autoprefixer": "^10.4.22", "autoprefixer": "^10.4.22",
"daisyui": "^5.5.5", "daisyui": "^5.5.5",
"postcss": "^8.5.6", "postcss": "^8.5.6",
"tailwindcss": "^4.1.17" "tailwindcss": "^4.1.17"
}, },
"dependencies": { "dependencies": {
"@tailwindcss/cli": "^4.1.17" "@tailwindcss/cli": "^4.1.17"
} }
} }

View File

@@ -1,33 +1,33 @@
asgiref==3.11.0 asgiref==3.11.0
certifi==2025.11.12 certifi==2025.11.12
cffi==2.0.0 cffi==2.0.0
charset-normalizer==3.4.4 charset-normalizer==3.4.4
cryptography==46.0.3 cryptography==46.0.3
decorator==5.2.1 decorator==5.2.1
Django==5.2.8 Django==5.2.8
dogpile.cache==1.5.0 dogpile.cache==1.5.0
idna==3.11 idna==3.11
iso8601==2.1.0 iso8601==2.1.0
jmespath==1.0.1 jmespath==1.0.1
jsonpatch==1.33 jsonpatch==1.33
jsonpointer==3.0.0 jsonpointer==3.0.0
keystoneauth1==5.12.0 keystoneauth1==5.12.0
numpy==2.3.5 numpy==2.3.5
openstacksdk==4.8.0 openstacksdk==4.8.0
os-service-types==1.8.2 os-service-types==1.8.2
pandas==2.3.3 pandas==2.3.3
pbr==7.0.3 pbr==7.0.3
platformdirs==4.5.0 platformdirs==4.5.0
psutil==7.1.3 psutil==7.1.3
pycparser==2.23 pycparser==2.23
python-dateutil==2.9.0.post0 python-dateutil==2.9.0.post0
pytz==2025.2 pytz==2025.2
PyYAML==6.0.3 PyYAML==6.0.3
requests==2.32.5 requests==2.32.5
requestsexceptions==1.4.0 requestsexceptions==1.4.0
six==1.17.0 six==1.17.0
sqlparse==0.5.4 sqlparse==0.5.4
stevedore==5.6.0 stevedore==5.6.0
typing_extensions==4.15.0 typing_extensions==4.15.0
tzdata==2025.2 tzdata==2025.2
urllib3==2.5.0 urllib3==2.5.0

View File

@@ -1,39 +1,39 @@
@import "tailwindcss"; @import "tailwindcss";
@plugin "daisyui"; @plugin "daisyui";
@plugin "daisyui/theme" { @plugin "daisyui/theme" {
name: "light"; name: "light";
default: true; default: true;
prefersdark: false; prefersdark: false;
color-scheme: "light"; color-scheme: "light";
--color-base-100: oklch(100% 0 0); --color-base-100: oklch(100% 0 0);
--color-base-200: oklch(98% 0 0); --color-base-200: oklch(98% 0 0);
--color-base-300: oklch(95% 0 0); --color-base-300: oklch(95% 0 0);
--color-base-content: oklch(21% 0.006 285.885); --color-base-content: oklch(21% 0.006 285.885);
--color-primary: #09418E; --color-primary: #0A2896;
--color-primary-content: oklch(93% 0.034 272.788); --color-primary-content: oklch(93% 0.034 272.788);
--color-secondary: #428BCA; --color-secondary: #428BCA;
--color-secondary-content: oklch(100% 0 0); --color-secondary-content: oklch(100% 0 0);
--color-accent: #A492FF; --color-accent: #A492FF;
--color-accent-content: oklch(21% 0.006 285.885); --color-accent-content: oklch(21% 0.006 285.885);
--color-neutral: #333333; --color-neutral: #333333;
--color-neutral-content: oklch(92% 0.004 286.32); --color-neutral-content: oklch(92% 0.004 286.32);
--color-info: oklch(74% 0.16 232.661); --color-info: oklch(74% 0.16 232.661);
--color-info-content: oklch(29% 0.066 243.157); --color-info-content: oklch(29% 0.066 243.157);
--color-success: oklch(76% 0.177 163.223); --color-success: oklch(76% 0.177 163.223);
--color-success-content: oklch(37% 0.077 168.94); --color-success-content: oklch(37% 0.077 168.94);
--color-warning: oklch(82% 0.189 84.429); --color-warning: oklch(82% 0.189 84.429);
--color-warning-content: oklch(41% 0.112 45.904); --color-warning-content: oklch(41% 0.112 45.904);
--color-error: oklch(71% 0.194 13.428); --color-error: oklch(71% 0.194 13.428);
--color-error-content: oklch(27% 0.105 12.094); --color-error-content: oklch(27% 0.105 12.094);
--radius-selector: 0.5rem; --radius-selector: 0.5rem;
--radius-field: 0.25rem; --radius-field: 0.25rem;
--radius-box: 0.5rem; --radius-box: 0.5rem;
--size-selector: 0.25rem; --size-selector: 0.25rem;
--size-field: 0.25rem; --size-field: 0.25rem;
--border: 1px; --border: 1px;
--depth: 1; --depth: 1;
--noise: 0; --noise: 0;
} }
@source "../../templates"; @source "../../templates";

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -1,26 +1,26 @@
// Color utilities // Color utilities
const getCSSVar = (varName) => { const getCSSVar = (varName) => {
return getComputedStyle(document.documentElement).getPropertyValue(varName).trim(); return getComputedStyle(document.documentElement).getPropertyValue(varName).trim();
} }
function getColorWithOpacity(className) { function getColorWithOpacity(className) {
const element = document.createElement('div'); const element = document.createElement('div');
element.className = className; element.className = className;
element.style.opacity = '1'; // Force opacity element.style.opacity = '1'; // Force opacity
element.textContent = '.'; element.textContent = '.';
document.body.appendChild(element); document.body.appendChild(element);
const computedColor = window.getComputedStyle(element).color; const computedColor = window.getComputedStyle(element).color;
document.body.removeChild(element); document.body.removeChild(element);
return computedColor; return computedColor;
} }
// Utility function to calculate mean and standard deviation // Utility function to calculate mean and standard deviation
function calculateStats(data) { function calculateStats(data) {
if (!data || data.length === 0) return { mean: 0, std: 0 }; if (!data || data.length === 0) return { mean: 0, std: 0 };
const mean = data.reduce((a, b) => a + b, 0) / data.length; const mean = data.reduce((a, b) => a + b, 0) / data.length;
const variance = data.reduce((a, b) => a + Math.pow(b - mean, 2), 0) / data.length; const variance = data.reduce((a, b) => a + Math.pow(b - mean, 2), 0) / data.length;
const std = Math.sqrt(variance); const std = Math.sqrt(variance);
return { mean, std }; return { mean, std };
} }

View File

@@ -1,14 +1,14 @@
/** @type {import('tailwindcss').Config} */ /** @type {import('tailwindcss').Config} */
module.exports = { module.exports = {
content: [ content: [
"./templates/**/*.html", "./templates/**/*.html",
"./static/src/**/*.js", "./static/src/**/*.js",
], ],
theme: { theme: {
extend: {}, extend: {},
}, },
plugins: [require("daisyui")], plugins: [require("daisyui")],
daisyui: { daisyui: {
themes: ["light", "dark"], themes: ["light", "dark"],
}, },
} }

View File

@@ -1,68 +1,68 @@
{% load static %} {% load static %}
<!DOCTYPE html> <!DOCTYPE html>
<html lang="en" data-theme="light"> <html lang="en" data-theme="light">
<head> <head>
<meta charset="UTF-8"> <meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0"> <meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>{% block title %}SWatcher{% endblock %}</title> <title>{% block title %}SWatcher{% endblock %}</title>
<link rel="stylesheet" href="{% static 'css/output.css' %}"> <link rel="stylesheet" href="{% static 'css/output.css' %}">
{% block imports %} {% block imports %}
{% endblock %} {% endblock %}
{% block css %} {% block css %}
{% endblock %} {% endblock %}
</head> </head>
<body> <body>
<!-- Navbar --> <!-- Navbar -->
<div class="navbar bg-base-100 shadow-lg"> <div class="navbar bg-base-100 shadow-lg">
<div class="navbar-start"> <div class="navbar-start">
<a class="btn btn-ghost text-xl" href="{% url 'index' %}">SWatcher</a> <a class="btn btn-ghost text-xl" href="{% url 'index' %}">SWatcher</a>
</div> </div>
<div class="navbar-center hidden lg:flex"> <div class="navbar-center hidden lg:flex">
</div> </div>
<div class="navbar-end"> <div class="navbar-end">
<div class="px-1 flex gap-3 pr-10"> <div class="px-1 flex gap-3 pr-10">
<span class="badge badge-primary badge-lg">{{ region.name }}</span> <span class="badge badge-primary badge-lg">{{ region.name }}</span>
<label class="swap swap-rotate"> <label class="swap swap-rotate">
<input type="checkbox" class="theme-controller" value="dark" /> <input type="checkbox" class="theme-controller" value="dark" />
<svg class="swap-off fill-current w-6 h-6" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24"> <svg class="swap-off fill-current w-6 h-6" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24">
<path d="M5.64,17l-.71.71a1,1,0,0,0,0,1.41,1,1,0,0,0,1.41,0l.71-.71A1,1,0,0,0,5.64,17ZM5,12a1,1,0,0,0-1-1H3a1,1,0,0,0,0,2H4A1,1,0,0,0,5,12Zm7-7a1,1,0,0,0,1-1V3a1,1,0,0,0-2,0V4A1,1,0,0,0,12,5ZM5.64,7.05a1,1,0,0,0,.7.29,1,1,0,0,0,.71-.29,1,1,0,0,0,0-1.41l-.71-.71A1,1,0,0,0,4.93,6.34Zm12,.29a1,1,0,0,0,.7-.29l.71-.71a1,1,0,1,0-1.41-1.41L17,5.64a1,1,0,0,0,0,1.41A1,1,0,0,0,17.66,7.34ZM21,11H20a1,1,0,0,0,0,2h1a1,1,0,0,0,0-2Zm-9,8a1,1,0,0,0-1,1v1a1,1,0,0,0,2,0V20A1,1,0,0,0,12,19ZM18.36,17A1,1,0,0,0,17,18.36l.71.71a1,1,0,0,0,1.41,0,1,1,0,0,0,0-1.41ZM12,6.5A5.5,5.5,0,1,0,17.5,12,5.51,5.51,0,0,0,12,6.5Zm0,9A3.5,3.5,0,1,1,15.5,12,3.5,3.5,0,0,1,12,15.5Z"/> <path d="M5.64,17l-.71.71a1,1,0,0,0,0,1.41,1,1,0,0,0,1.41,0l.71-.71A1,1,0,0,0,5.64,17ZM5,12a1,1,0,0,0-1-1H3a1,1,0,0,0,0,2H4A1,1,0,0,0,5,12Zm7-7a1,1,0,0,0,1-1V3a1,1,0,0,0-2,0V4A1,1,0,0,0,12,5ZM5.64,7.05a1,1,0,0,0,.7.29,1,1,0,0,0,.71-.29,1,1,0,0,0,0-1.41l-.71-.71A1,1,0,0,0,4.93,6.34Zm12,.29a1,1,0,0,0,.7-.29l.71-.71a1,1,0,1,0-1.41-1.41L17,5.64a1,1,0,0,0,0,1.41A1,1,0,0,0,17.66,7.34ZM21,11H20a1,1,0,0,0,0,2h1a1,1,0,0,0,0-2Zm-9,8a1,1,0,0,0-1,1v1a1,1,0,0,0,2,0V20A1,1,0,0,0,12,19ZM18.36,17A1,1,0,0,0,17,18.36l.71.71a1,1,0,0,0,1.41,0,1,1,0,0,0,0-1.41ZM12,6.5A5.5,5.5,0,1,0,17.5,12,5.51,5.51,0,0,0,12,6.5Zm0,9A3.5,3.5,0,1,1,15.5,12,3.5,3.5,0,0,1,12,15.5Z"/>
</svg> </svg>
<svg class="swap-on fill-current w-6 h-6" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24"> <svg class="swap-on fill-current w-6 h-6" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24">
<path d="M21.64,13a1,1,0,0,0-1.05-.14,8.05,8.05,0,0,1-3.37.73A8.15,8.15,0,0,1,9.08,5.49a8.59,8.59,0,0,1,.25-2A1,1,0,0,0,8,2.36,10.14,10.14,0,1,0,22,14.05,1,1,0,0,0,21.64,13Zm-9.5,6.69A8.14,8.14,0,0,1,7.08,5.22v.27A10.15,10.15,0,0,0,17.22,15.63a9.79,9.79,0,0,0,2.1-.22A8.11,8.11,0,0,1,12.14,19.73Z"/> <path d="M21.64,13a1,1,0,0,0-1.05-.14,8.05,8.05,0,0,1-3.37.73A8.15,8.15,0,0,1,9.08,5.49a8.59,8.59,0,0,1,.25-2A1,1,0,0,0,8,2.36,10.14,10.14,0,1,0,22,14.05,1,1,0,0,0,21.64,13Zm-9.5,6.69A8.14,8.14,0,0,1,7.08,5.22v.27A10.15,10.15,0,0,0,17.22,15.63a9.79,9.79,0,0,0,2.1-.22A8.11,8.11,0,0,1,12.14,19.73Z"/>
</svg> </svg>
</label> </label>
</div> </div>
</div> </div>
</div> </div>
<!-- Main Content --> <!-- Main Content -->
<main class="container mx-auto px-4 py-8 min-h-screen"> <main class="container mx-auto px-4 py-8 min-h-screen">
{% block content %} {% block content %}
{% endblock %} {% endblock %}
</main> </main>
<script> <script>
// Function to apply theme // Function to apply theme
function applyTheme(theme) { function applyTheme(theme) {
document.documentElement.setAttribute('data-theme', theme); document.documentElement.setAttribute('data-theme', theme);
const checkbox = document.querySelector('.theme-controller'); const checkbox = document.querySelector('.theme-controller');
checkbox.checked = (theme === 'dark'); checkbox.checked = (theme === 'dark');
document.dispatchEvent(new Event("themechange")); document.dispatchEvent(new Event("themechange"));
} }
// Load saved theme from localStorage // Load saved theme from localStorage
const savedTheme = localStorage.getItem('theme') || 'light'; const savedTheme = localStorage.getItem('theme') || 'light';
applyTheme(savedTheme); applyTheme(savedTheme);
// Listen for toggle changes // Listen for toggle changes
document.querySelector('.theme-controller').addEventListener('change', function() { document.querySelector('.theme-controller').addEventListener('change', function() {
const newTheme = this.checked ? 'dark' : 'light'; const newTheme = this.checked ? 'dark' : 'light';
applyTheme(newTheme); applyTheme(newTheme);
localStorage.setItem('theme', newTheme); localStorage.setItem('theme', newTheme);
}); });
</script> </script>
{% block script %} {% block script %}
{% endblock %} {% endblock %}
</body> </body>
</html> </html>

File diff suppressed because it is too large Load Diff

View File

@@ -1,16 +1,16 @@
""" """
ASGI config for watcher_visio project. ASGI config for watcher_visio project.
It exposes the ASGI callable as a module-level variable named ``application``. It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see For more information on this file, see
https://docs.djangoproject.com/en/5.2/howto/deployment/asgi/ https://docs.djangoproject.com/en/5.2/howto/deployment/asgi/
""" """
import os import os
from django.core.asgi import get_asgi_application from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'watcher_visio.settings') os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'watcher_visio.settings')
application = get_asgi_application() application = get_asgi_application()

View File

@@ -1,144 +1,157 @@
""" """
Django settings for watcher_visio project. Django settings for watcher_visio project.
Generated by 'django-admin startproject' using Django 5.2.8. Generated by 'django-admin startproject' using Django 5.2.8.
For more information on this file, see For more information on this file, see
https://docs.djangoproject.com/en/5.2/topics/settings/ https://docs.djangoproject.com/en/5.2/topics/settings/
For the full list of settings and their values, see For the full list of settings and their values, see
https://docs.djangoproject.com/en/5.2/ref/settings/ https://docs.djangoproject.com/en/5.2/ref/settings/
""" """
from pathlib import Path import os
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent # Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production # Use mock data when no OpenStack/Prometheus access (e.g. local dev)
# See https://docs.djangoproject.com/en/5.2/howto/deployment/checklist/ USE_MOCK_DATA = os.environ.get("USE_MOCK_DATA", "false").lower() in ("1", "true", "yes")
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'django-insecure-747*14ir*49hoo6c2225)kxr%4^am0ub_s-m^_7i4cctu)v$g8' # Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/5.2/howto/deployment/checklist/
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True # SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'django-insecure-747*14ir*49hoo6c2225)kxr%4^am0ub_s-m^_7i4cctu)v$g8'
ALLOWED_HOSTS = []
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
# Application definition
ALLOWED_HOSTS = ['*']
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth', # Application definition
'django.contrib.contenttypes',
'django.contrib.sessions', INSTALLED_APPS = [
'django.contrib.messages', 'django.contrib.admin',
'django.contrib.staticfiles', 'django.contrib.auth',
'dashboard', 'django.contrib.contenttypes',
] 'django.contrib.sessions',
'django.contrib.messages',
# Prometheus settings (environment override recommended) 'django.contrib.staticfiles',
PROMETHEUS_URL = "http://localhost:9090" 'dashboard',
PROMETHEUS_METRICS = { ]
"cpu_usage": "",
"ram_usage": "" # Prometheus settings (environment override recommended)
} PROMETHEUS_URL = "http://10.226.74.53:9090/"
PROMETHEUS_METRICS = {
# Openstack cloud settings "cpu_usage": "rate(libvirt_domain_info_cpu_time_seconds_total)[300s]",
OPENSTACK_REGION_NAME = "default" "ram_usage": "avg_over_time(libvirt_domain_info_memory_usage_bytes[300s]"
OPENSTACK_CLOUD = "default" }
# Openstack watcher endoint settings # Openstack cloud settings
WATCHER_ENDPOINT_NAME = "infra-optim" OPENSTACK_REGION_NAME = "cl2k1distlab"
WATCHER_INTERFACE_NAME = "public" OPENSTACK_CLOUD = "distlab"
MIDDLEWARE = [ # Openstack watcher endoint settings
'django.middleware.security.SecurityMiddleware', WATCHER_ENDPOINT_NAME = "infra-optim"
'django.contrib.sessions.middleware.SessionMiddleware', WATCHER_INTERFACE_NAME = "public"
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware', MIDDLEWARE = [
'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.middleware.security.SecurityMiddleware',
'django.contrib.messages.middleware.MessageMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware', 'django.middleware.common.CommonMiddleware',
] 'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
ROOT_URLCONF = 'watcher_visio.urls' 'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
TEMPLATES = [ ]
{
'BACKEND': 'django.template.backends.django.DjangoTemplates', ROOT_URLCONF = 'watcher_visio.urls'
'DIRS': [BASE_DIR / 'templates'],
'APP_DIRS': True, TEMPLATES = [
'OPTIONS': { {
'context_processors': [ 'BACKEND': 'django.template.backends.django.DjangoTemplates',
'django.template.context_processors.request', 'DIRS': [BASE_DIR / 'templates'],
'django.contrib.auth.context_processors.auth', 'APP_DIRS': True,
'django.contrib.messages.context_processors.messages', 'OPTIONS': {
], 'context_processors': [
}, 'django.template.context_processors.request',
}, 'django.contrib.auth.context_processors.auth',
] 'django.contrib.messages.context_processors.messages',
],
WSGI_APPLICATION = 'watcher_visio.wsgi.application' },
},
]
# Database
# https://docs.djangoproject.com/en/5.2/ref/settings/#databases WSGI_APPLICATION = 'watcher_visio.wsgi.application'
DATABASES = {
'default': { # Database
'ENGINE': 'django.db.backends.sqlite3', # https://docs.djangoproject.com/en/5.2/ref/settings/#databases
'NAME': BASE_DIR / 'db.sqlite3',
} DATABASES = {
} 'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
# Password validation }
# https://docs.djangoproject.com/en/5.2/ref/settings/#auth-password-validators }
AUTH_PASSWORD_VALIDATORS = [
{ # Password validation
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', # https://docs.djangoproject.com/en/5.2/ref/settings/#auth-password-validators
},
{ AUTH_PASSWORD_VALIDATORS = [
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', {
}, 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
{ },
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', {
}, 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
{ },
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', {
}, 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
] },
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
# Internationalization },
# https://docs.djangoproject.com/en/5.2/topics/i18n/ ]
LANGUAGE_CODE = 'en-us'
# Internationalization
TIME_ZONE = 'UTC' # https://docs.djangoproject.com/en/5.2/topics/i18n/
USE_I18N = True LANGUAGE_CODE = 'en-us'
USE_TZ = True TIME_ZONE = 'UTC'
USE_I18N = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/5.2/howto/static-files/ USE_TZ = True
STATIC_URL = '/static/'
# Static files (CSS, JavaScript, Images)
STATICFILES_DIRS = [ # https://docs.djangoproject.com/en/5.2/howto/static-files/
BASE_DIR / "static",
] STATIC_URL = '/static/'
STATIC_ROOT = BASE_DIR / "staticfiles" STATICFILES_DIRS = [
BASE_DIR / "static",
# Default primary key field type ]
# https://docs.djangoproject.com/en/5.2/ref/settings/#default-auto-field
STATIC_ROOT = BASE_DIR / "staticfiles"
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
# Default primary key field type
# https://docs.djangoproject.com/en/5.2/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
# Dashboard cache (reduces load on OpenStack/Prometheus and allows concurrent users)
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'watcher-visio-dashboard',
}
}
DASHBOARD_CACHE_TTL = 120 # seconds

View File

@@ -1,23 +1,23 @@
""" """
URL configuration for watcher_visio project. URL configuration for watcher_visio project.
The `urlpatterns` list routes URLs to views. For more information please see: The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/5.2/topics/http/urls/ https://docs.djangoproject.com/en/5.2/topics/http/urls/
Examples: Examples:
Function views Function views
1. Add an import: from my_app import views 1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home') 2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views Class-based views
1. Add an import: from other_app.views import Home 1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home') 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf Including another URLconf
1. Import the include() function: from django.urls import include, path 1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls')) 2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
""" """
from django.contrib import admin from django.contrib import admin
from django.urls import path, include from django.urls import path, include
urlpatterns = [ urlpatterns = [
path('admin/', admin.site.urls), path('admin/', admin.site.urls),
path('', include('dashboard.urls')), path('', include('dashboard.urls')),
] ]

View File

@@ -1,16 +1,16 @@
""" """
WSGI config for watcher_visio project. WSGI config for watcher_visio project.
It exposes the WSGI callable as a module-level variable named ``application``. It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see For more information on this file, see
https://docs.djangoproject.com/en/5.2/howto/deployment/wsgi/ https://docs.djangoproject.com/en/5.2/howto/deployment/wsgi/
""" """
import os import os
from django.core.wsgi import get_wsgi_application from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'watcher_visio.settings') os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'watcher_visio.settings')
application = get_wsgi_application() application = get_wsgi_application()