Compare commits

11 Commits:

2d48e87893
6c1a62e09d
a5d3b13629
83f71b3f81
730168ab61
8bef6d676c
813265f8ee
b47070cfc2
92ef3e6a85
a63776fb3f
e24a29e94f
@@ -7,22 +7,7 @@ IMAGE_NAME=numberimage
 docker build --progress=plain -t $IMAGE_NAME .
 
-SECRETS=`mktemp`
-gpg --decrypt --passphrase $GPG_PASSPHRASE --yes --batch --output $SECRETS ./deployment/secrets.asc
-. $SECRETS
-rm $SECRETS
-
-DB_NAMESPACE=database1
-DB_DEPLOYNAME=database
-
-REDIS_NAMESPACE=redis
-REDIS_SERVICE_NAME=redis
-
-PGHOST=`kubectl get services $DB_DEPLOYNAME -n $DB_NAMESPACE -o jsonpath="{.status.loadBalancer.ingress[0].ip}"`
-REDISHOST=`kubectl get services $REDIS_SERVICE_NAME -n $REDIS_NAMESPACE -o jsonpath="{.status.loadBalancer.ingress[0].ip}"`
-
-REDIS_URL=redis://$REDISHOST:6379/4
+. load-debug-env
 
 docker run \
   -it \
@@ -1,27 +1,27 @@
 apiVersion: apps/v1
 kind: Deployment
 metadata:
-  name: pv-stats
+  name: numbers
   labels:
-    app: pv-stats
+    app: numbers
   annotations:
-    secret.reloader.stakater.com/reload: pv-stats
+    secret.reloader.stakater.com/reload: numbers
 spec:
   replicas: 1
   selector:
     matchLabels:
-      app: pv-stats
+      app: numbers
   template:
     metadata:
       labels:
-        app: pv-stats
+        app: numbers
     spec:
       containers:
-        - name: pv-stats
+        - name: numbers
           image: %IMAGE%
           envFrom:
             - secretRef:
-                name: pv-stats
+                name: numbers
           ports:
             - containerPort: 8080
               protocol: TCP
@@ -29,11 +29,11 @@ spec:
 apiVersion: v1
 kind: Service
 metadata:
-  name: pv-stats
+  name: numbers
 spec:
   type: ClusterIP
   selector:
-    app: pv-stats
+    app: numbers
   ports:
     - name: http
       targetPort: 8080
@@ -42,7 +42,7 @@ spec:
 apiVersion: networking.k8s.io/v1
 kind: Ingress
 metadata:
-  name: pv-stats
+  name: numbers
   annotations:
     cert-manager.io/cluster-issuer: letsencrypt-production-http
 spec:
@@ -58,7 +58,7 @@ spec:
           pathType: Prefix
           backend:
             service:
-              name: pv-stats
+              name: numbers
              port:
                number: 80
 
@@ -25,7 +25,7 @@ kubectl create namespace $NAMESPACE \
 # rm $SECRETS_FILE
 eval "`cat secrets.asc | /usr/local/bin/decrypt-secrets.sh`"
 
-kubectl create secret generic pv-stats \
+kubectl create secret generic numbers \
   --dry-run=client \
   -o yaml \
   --save-config \
load-debug-env (new file, 15 lines)

@@ -0,0 +1,15 @@
+SECRETS=`mktemp`
+gpg --decrypt --passphrase $GPG_PASSPHRASE --yes --batch --output $SECRETS ./deployment/secrets.asc
+. $SECRETS
+rm $SECRETS
+
+DB_NAMESPACE=database1
+DB_DEPLOYNAME=database
+
+REDIS_NAMESPACE=redis
+REDIS_SERVICE_NAME=redis
+
+PGHOST=`kubectl get services $DB_DEPLOYNAME -n $DB_NAMESPACE -o jsonpath="{.status.loadBalancer.ingress[0].ip}"`
+REDISHOST=`kubectl get services $REDIS_SERVICE_NAME -n $REDIS_NAMESPACE -o jsonpath="{.status.loadBalancer.ingress[0].ip}"`
+
+REDIS_URL=redis://$REDISHOST:6379/4
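The file only exports environment variables because the application reads its connection settings from the environment: a minimal sketch of that assumption (not itself part of the diff) — psycopg's argument-less connect() honours the libpq variables such as PGHOST, and REDIS_URL can be picked up via os.environ.

    # Sketch, assuming env-based configuration (not part of the diff)
    import os
    import psycopg

    dbh = psycopg.connect()                  # no arguments: PGHOST etc. are taken from the environment
    redis_url = os.environ.get('REDIS_URL')  # e.g. redis://<host>:6379/4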
@@ -1,120 +1,129 @@
-from flask import Flask, session, g, render_template_string
+from flask import Flask, session, g, render_template_string, Response
 from loguru import logger
 import json
-import plotly.express as px
-import plotly.graph_objects as po
+import matplotlib.pyplot as plt
+import matplotlib.dates as mdates
+from matplotlib.ticker import ScalarFormatter
 import pandas as pd
 import psycopg
 import sqlalchemy
 import time
+import io
 
 from app import app
 from app import oidc
 
 
-@app.route('/ntpserver')
-def ntpserver():
-    try:
-        dbh = psycopg.connect()
-        engine = sqlalchemy.create_engine("postgresql+psycopg://", creator=lambda: dbh)
-
-        query = """
-            select time_bucket('5 minutes', time) as bucket,
-                   device,
-                   avg(cast(values->'rootdisp'->>'value' as float)) as rootdisp,
-                   max(cast(values->'stratum'->>'value' as int)) as stratum
-            from measurements
-            where time >= date_trunc('day', now()) AND time < date_trunc('day', now()) + '1 day'::interval and
-                  application = 'TSM' and attributes->>'Label' = 'david'
-            group by bucket, device
-            order by bucket, device
-        """
-
-        df = pd.read_sql(query, con=engine)
-
-        fig = po.Figure()
-        fig.add_trace(po.Scatter(x=df['bucket'], y=df['rootdisp'], mode='lines', name='Root Dispersion', yaxis='y1', line=dict(color='red')))
-        fig.add_trace(po.Scatter(x=df['bucket'], y=df['stratum'], mode='lines', name='Stratum', yaxis='y2', line=dict(color='blue')))
-
-        fig.update_layout(
-            title='NTP Server Numbers',
-
-            # left y-axis
-            yaxis=dict(
-                title='Root Dispersion',
-                ticksuffix=' ms'
-            ),
-
-            # right y-axis
-            yaxis2=dict(
-                title='Stratum',
-                overlaying='y',     # overlay the second y-axis on the first
-                side='right',       # place it on the right-hand side
-                tickmode='linear',  # keep the ticks at fixed intervals
-                dtick=1,            # show integer ticks only
-            ),
-
-            legend=dict(x=0.05, y=1)  # legend position
-        )
-
-        graph_html_1 = fig.to_html(full_html=False, default_height='30%')
-
-        query = """
-            select time_bucket('5 minutes', time) as bucket,
-                   device,
-                   avg(cast(values->'time-req-pkts'->>'value' as float)) as packets
-            from measurements
-            where time >= date_trunc('day', now()) AND time < date_trunc('day', now()) + '1 day'::interval and
-                  application = 'SNMP' and attributes->>'Label' = 'david'
-            group by bucket, device
-            order by bucket, device
-        """
-        df = pd.read_sql(query, con=engine)
-        fig_2 = px.line(df, x='bucket', y='packets')
-        fig_2.update_layout(
-            xaxis_title="",
-            yaxis_title="",
-            yaxis_ticksuffix="p/s",
-            title=f"Time Requests"
-        )
-        graph_html_2 = fig_2.to_html(full_html=False, default_height='30%')
-
-        query = """
-            select time_bucket('5 minutes', time) as bucket,
-                   device,
-                   avg(cast(values->'load1'->>'value' as float)) as loadaverage1min
-            from measurements
-            where time >= date_trunc('day', now()) AND time < date_trunc('day', now()) + '1 day'::interval and
-                  application = 'SNMP' and attributes->>'Label' = 'david'
-            group by bucket, device
-            order by bucket, device
-        """
-        df = pd.read_sql(query, con=engine)
-        fig_3 = px.line(df, x='bucket', y='loadaverage1min')
-        fig_3.update_layout(
-            xaxis_title="",
-            yaxis_title="",
-            title=f"CPU Load"
-        )
-        graph_html_3 = fig_3.to_html(full_html=False, default_height='30%')
-
-        return render_template_string(f"""
-            <html>
-            <head>
-            <title>NTP Server Numbers</title>
-            </head>
-            <body>
-            {graph_html_1}
-            {graph_html_2}
-            {graph_html_3}
-            </body>
-            </html>
-        """)
-    except Exception as e:
-        raise Exception(f"Error when querying NTP server values: {e}")
-    finally:
-        if dbh is not None:
-            dbh.close()
+@app.route('/ntp/stratum-rootdisp.png')
+def stratum_rootdisp_png():
+    dbh = psycopg.connect()
+    engine = sqlalchemy.create_engine("postgresql+psycopg://", creator=lambda: dbh)
+    query = """
+        select time_bucket('5 minutes', time) as bucket,
+               attributes->>'Label' as device,
+               avg(cast(values->'rootdisp'->>'value' as float)) as rootdisp,
+               max(cast(values->'stratum'->>'value' as int)) as stratum
+        from measurements
+        where time >= date_trunc('day', now()) AND time < date_trunc('day', now()) + '1 day'::interval and
+              application = 'SNMP' and attributes->>'Label' IN ('harrison', 'david')
+        group by bucket, attributes->>'Label'
+        order by bucket, attributes->>'Label'
+    """
+    df = pd.read_sql(query, con=engine)
+
+    df['rootdisp'] = df['rootdisp'] / 1e6
+
+    # Extract date for title
+    plot_date = df['bucket'].dt.date.iloc[0] if not df.empty else "Unknown Date"
+
+    # Create figure with two side-by-side subplots
+    fig, axes = plt.subplots(1, 2, figsize=(15, 5), sharex=True)
+
+    for i, device in enumerate(['harrison', 'david']):
+        ax1 = axes[i]
+        ax2 = ax1.twinx()
+
+        device_df = df[df['device'] == device]
+
+        ax1.plot(device_df['bucket'], device_df['rootdisp'], 'r-', label='Root Dispersion')
+        ax1.set_xlabel('Time')
+        ax1.set_ylabel('Root Dispersion (ms)', color='r')
+        ax1.tick_params(axis='y', labelcolor='r')
+
+        ax2.plot(device_df['bucket'], device_df['stratum'], 'b-', label='Stratum')
+        ax2.set_ylabel('Stratum', color='b')
+        ax2.tick_params(axis='y', labelcolor='b')
+        ax2.set_yticks(range(int(device_df['stratum'].min()), int(device_df['stratum'].max()) + 1))
+
+        ax1.set_title(f'{device.capitalize()}')
+
+        ax1.xaxis.set_major_formatter(mdates.DateFormatter('%H:%M'))
+        fig.autofmt_xdate(rotation=45)
+
+    fig.suptitle(f'Stratum and Root Dispersion - {plot_date}')
+    fig.tight_layout()
+
+    img_io = io.BytesIO()
+    plt.savefig(img_io, format='png')
+    img_io.seek(0)
+    plt.close(fig)
+
+    return Response(img_io, mimetype='image/png')
+
+
+@app.route('/ntp/packets-load.png')
+def packets_load_png():
+    dbh = psycopg.connect()
+    engine = sqlalchemy.create_engine("postgresql+psycopg://", creator=lambda: dbh)
+    query = """
+        select time_bucket('5 minutes', time) as bucket,
+               attributes->>'Label' as device,
+               avg(cast(values->'load1'->>'value' as float)) as load,
+               avg(cast(values->'processed-pkts'->>'value' as int)) as packets
+        from measurements
+        where time >= date_trunc('day', now()) AND time < date_trunc('day', now()) + '1 day'::interval and
+              application = 'SNMP' and attributes->>'Label' IN ('harrison', 'david')
+        group by bucket, attributes->>'Label'
+        order by bucket, attributes->>'Label'
+    """
+    df = pd.read_sql(query, con=engine)
+
+    # Extract date for title
+    plot_date = df['bucket'].dt.date.iloc[0] if not df.empty else "Unknown Date"
+
+    # Create figure with two side-by-side subplots
+    fig, axes = plt.subplots(1, 2, figsize=(15, 5), sharex=True)
+
+    for i, device in enumerate(['harrison', 'david']):
+        ax1 = axes[i]
+        ax2 = ax1.twinx()
+
+        device_df = df[df['device'] == device]
+
+        ax1.plot(device_df['bucket'], device_df['load'], 'r-', label='CPU Load')
+        ax1.set_xlabel('Time')
+        ax1.set_ylabel('Load', color='r')
+        ax1.tick_params(axis='y', labelcolor='r')
+
+        ax2.plot(device_df['bucket'], device_df['packets'], 'b-', label='Processed Packets')
+        ax2.set_ylabel('Packets', color='b')
+        ax2.tick_params(axis='y', labelcolor='b')
+
+        ax1.set_title(f'{device.capitalize()}')
+
+        ax1.xaxis.set_major_formatter(mdates.DateFormatter('%H:%M'))
+        fig.autofmt_xdate(rotation=45)
+
+    fig.suptitle(f'CPU Load and Processed Packets - {plot_date}')
+    fig.tight_layout()
+
+    img_io = io.BytesIO()
+    plt.savefig(img_io, format='png')
+    img_io.seek(0)
+    plt.close(fig)
+
+    return Response(img_io, mimetype='image/png')
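A minimal smoke test for the two new image routes, assuming the Flask app object is importable as in the diff and a database is reachable; the route paths come from the hunk above, the test function itself is hypothetical.

    # Hypothetical smoke test for the new PNG endpoints (not part of the diff)
    from app import app

    def test_ntp_png_routes():
        client = app.test_client()
        for path in ('/ntp/stratum-rootdisp.png', '/ntp/packets-load.png'):
            resp = client.get(path)
            assert resp.status_code == 200
            assert resp.mimetype == 'image/png'
            assert resp.data.startswith(b'\x89PNG')  # PNG file signature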
@@ -39,3 +39,5 @@ urllib3==2.3.0
 Werkzeug==3.1.3
 zipp==3.21.0
 pillow==11.1.0
+matplotlib==3.10.1
+
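One point worth noting when matplotlib is added for server-side rendering: in a headless container the non-interactive Agg backend is usually selected explicitly. This is an assumption, not part of the diff; a sketch:

    # Assumption / optional hardening, not part of the diff: force the
    # non-interactive Agg backend before pyplot is imported so PNG rendering
    # works without a display.
    import matplotlib
    matplotlib.use('Agg')
    import matplotlib.pyplot as plt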
@@ -1,5 +1,6 @@
 from flask import abort, Response
 from PIL import Image, ImageDraw
+import io
 from app import app
 from app import oidc
@@ -10,7 +10,7 @@ import ntp_routes
 
 
 if __name__ == '__main__':
-    app.run(port=8080)
+    app.run(host='0.0.0.0', port=8080)
 else:
     exposed_app = ProxyFix(app, x_for=1, x_host=1)
 
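The host change matters inside the container: 127.0.0.1 is only reachable from within the container itself, while 0.0.0.0 accepts connections from the pod network. A sketch of the resulting wiring, with the werkzeug import assumed (it is not visible in the hunk); the rest mirrors the diff:

    # Sketch of the run/WSGI wiring implied by the hunk above
    from werkzeug.middleware.proxy_fix import ProxyFix
    from app import app

    if __name__ == '__main__':
        app.run(host='0.0.0.0', port=8080)              # reachable from outside the container
    else:
        exposed_app = ProxyFix(app, x_for=1, x_host=1)  # trust X-Forwarded-For/-Host from one proxy hop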