diff --git a/debug-build-run.sh b/debug-build-run.sh
new file mode 100755
index 0000000..10aabb1
--- /dev/null
+++ b/debug-build-run.sh
@@ -0,0 +1,43 @@
+#!/bin/bash
+
+set -x
+
+IMAGE_NAME=numberimage
+
+docker build --progress=plain -t "$IMAGE_NAME" .
+
+
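+# Decrypt the GPG-encrypted secrets file into a temp file, source it into the
+# environment, then remove it. Note that set -x above echoes the passphrase.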
+SECRETS=$(mktemp)
+gpg --decrypt --passphrase "$GPG_PASSPHRASE" --yes --batch --output "$SECRETS" ./deployment/secrets.asc
+. "$SECRETS"
+rm "$SECRETS"
+
+DB_NAMESPACE=database1
+DB_DEPLOYNAME=database
+
+REDIS_NAMESPACE=redis
+REDIS_SERVICE_NAME=redis
+
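+# Resolve the external LoadBalancer IPs of the Postgres and Redis services.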
+PGHOST=$(kubectl get services "$DB_DEPLOYNAME" -n "$DB_NAMESPACE" -o jsonpath="{.status.loadBalancer.ingress[0].ip}")
+REDISHOST=$(kubectl get services "$REDIS_SERVICE_NAME" -n "$REDIS_NAMESPACE" -o jsonpath="{.status.loadBalancer.ingress[0].ip}")
+
+REDIS_URL=redis://$REDISHOST:6379/4
+
+
+docker run \
+ -it \
+ --rm \
+ -e "REDIS_URL=$REDIS_URL" \
+ -e "SECRET_KEY=$SECRET_KEY" \
+ -e "OIDC_CLIENT_SECRETS=$OIDC_CLIENT_SECRETS" \
+ -e "PGHOST=$PGHOST" \
+ -e "PGDATABASE=$PGDATABASE" \
+ -e "PGSSLMODE=$PGSSLMODE" \
+ -e "PGUSER=$PGUSER" \
+ -e "PGPASSWORD=$PGPASSWORD" \
+ -p 8080:8080 \
+    "$IMAGE_NAME"
+
diff --git a/src/app.py b/src/app.py
new file mode 100644
index 0000000..f7ef5f5
--- /dev/null
+++ b/src/app.py
@@ -0,0 +1,34 @@
+from flask import Flask
+from flask_session import Session
+from flask_oidc import OpenIDConnect
+from loguru import logger
+import redis
+import json
+import os
+
+try:
+ redis_url = os.environ['REDIS_URL']
+ oidc_client_secrets = os.environ['OIDC_CLIENT_SECRETS']
+ secret_key = os.environ['SECRET_KEY']
+except KeyError as e:
+ logger.error(f"Required environment variable not set ({e})")
+ raise e
+
+app = Flask(__name__)
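+# Flask-Session keeps session data server-side in Redis; the signed cookie
+# only carries the session id. The OIDC client configuration is injected as
+# a JSON document through the environment.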
+app.config.update({
+ 'SECRET_KEY': secret_key,
+ 'SESSION_TYPE': 'redis',
+ 'SESSION_REDIS': redis.from_url(redis_url),
+ 'OIDC_CLIENT_SECRETS': json.loads(oidc_client_secrets),
+ 'OIDC_SCOPES': 'openid email',
+ 'OIDC_USER_INFO_ENABLED': True,
+ 'SESSION_USE_SIGNER': True,
+})
+Session(app)
+oidc = OpenIDConnect(app)
+
+
+
diff --git a/src/debug_routes.py b/src/debug_routes.py
new file mode 100644
index 0000000..ecb2845
--- /dev/null
+++ b/src/debug_routes.py
@@ -0,0 +1,17 @@
+import json
+
+from app import app
+from app import oidc
+
+
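+# Debugging aid: returns the raw OIDC access token of the logged-in user as
+# JSON, so it should not be exposed in production deployments.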
+@app.route('/token_debug', methods=['GET'])
+@oidc.require_login
+def token_debug():
+    # Retrieve the access token from the identity provider
+ access_token = oidc.get_access_token()
+ return json.dumps({
+ "access_token": access_token
+ })
+
diff --git a/src/ntp_routes.py b/src/ntp_routes.py
new file mode 100644
index 0000000..15114e2
--- /dev/null
+++ b/src/ntp_routes.py
@@ -0,0 +1,124 @@
+from flask import render_template_string
+import plotly.express as px
+import plotly.graph_objects as po
+import pandas as pd
+import psycopg
+import sqlalchemy
+
+from app import app
+
+
+
+@app.route('/ntpserver')
+def ntpserver():
+    dbh = None
+    try:
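+        # psycopg.connect() without arguments reads the libpq PG* environment
+        # variables (PGHOST, PGDATABASE, PGUSER, ...) passed into the
+        # container; the SQLAlchemy engine wraps this single connection via
+        # `creator` so pandas can use it.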
+ dbh = psycopg.connect()
+ engine = sqlalchemy.create_engine("postgresql+psycopg://", creator=lambda: dbh)
+
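+        # time_bucket() is TimescaleDB's bucketing function; `values` and
+        # `attributes` are JSONB columns, hence the -> / ->> operators.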
+ query = """
+ select time_bucket('5 minutes', time) as bucket,
+ device,
+ avg(cast(values->'rootdisp'->>'value' as float)) as rootdisp,
+ max(cast(values->'stratum'->>'value' as int)) as stratum
+ from measurements
+ where time >= date_trunc('day', now()) AND time < date_trunc('day', now()) + '1 day'::interval and
+ application = 'TSM' and attributes->>'Label' = 'david'
+ group by bucket, device
+ order by bucket, device
+ """
+
+ df = pd.read_sql(query, con=engine)
+
+ fig = po.Figure()
+ fig.add_trace(po.Scatter(x=df['bucket'], y=df['rootdisp'], mode='lines', name='Root Dispersion', yaxis='y1', line=dict(color='red')))
+ fig.add_trace(po.Scatter(x=df['bucket'], y=df['stratum'], mode='lines', name='Stratum', yaxis='y2', line=dict(color='blue')))
+
+ fig.update_layout(
+ title='NTP Server Numbers',
+
+            # left y-axis
+            yaxis=dict(
+                title='Root Dispersion',
+                ticksuffix=' ms'
+            ),
+
+            # right y-axis
+            yaxis2=dict(
+                title='Stratum',
+                overlaying='y',     # draw this axis on top of the first one
+                side='right',       # place it on the right-hand side
+                tickmode='linear',  # ticks at fixed intervals
+                dtick=1,            # integer ticks only
+            ),
+
+            legend=dict(x=0.05, y=1)  # legend position
+ )
+
+ graph_html_1 = fig.to_html(full_html=False, default_height='30%')
+
+ query = """
+ select time_bucket('5 minutes', time) as bucket,
+ device,
+ avg(cast(values->'time-req-pkts'->>'value' as float)) as packets
+ from measurements
+ where time >= date_trunc('day', now()) AND time < date_trunc('day', now()) + '1 day'::interval and
+ application = 'SNMP' and attributes->>'Label' = 'david'
+ group by bucket, device
+ order by bucket, device
+ """
+ df = pd.read_sql(query, con=engine)
+ fig_2 = px.line(df, x='bucket', y='packets')
+ fig_2.update_layout(
+ xaxis_title="",
+ yaxis_title="",
+ yaxis_ticksuffix="p/s",
+            title="Time Requests"
+ )
+ graph_html_2 = fig_2.to_html(full_html=False, default_height='30%')
+
+ query = """
+ select time_bucket('5 minutes', time) as bucket,
+ device,
+ avg(cast(values->'load1'->>'value' as float)) as loadaverage1min
+ from measurements
+ where time >= date_trunc('day', now()) AND time < date_trunc('day', now()) + '1 day'::interval and
+ application = 'SNMP' and attributes->>'Label' = 'david'
+ group by bucket, device
+ order by bucket, device
+ """
+ df = pd.read_sql(query, con=engine)
+ fig_3 = px.line(df, x='bucket', y='loadaverage1min')
+ fig_3.update_layout(
+ xaxis_title="",
+ yaxis_title="",
+            title="CPU Load"
+ )
+ graph_html_3 = fig_3.to_html(full_html=False, default_height='30%')
+
+        return render_template_string(f"""
+            <!DOCTYPE html>
+            <html>
+              <head>
+                <title>NTP Server Numbers</title>
+              </head>
+              <body>
+                {graph_html_1}
+                {graph_html_2}
+                {graph_html_3}
+              </body>
+            </html>
+        """)
+ except Exception as e:
+ raise Exception(f"Error when querying NTP server values: {e}")
+ finally:
+ if dbh is not None:
+ dbh.close()
+
+
diff --git a/src/pv_routes.py b/src/pv_routes.py
new file mode 100644
index 0000000..191e793
--- /dev/null
+++ b/src/pv_routes.py
@@ -0,0 +1,110 @@
+from flask import render_template_string
+from loguru import logger
+import plotly.express as px
+import pandas as pd
+import psycopg
+import sqlalchemy
+import time
+
+from app import app
+from app import oidc
+
+
+@app.route('/')
+@oidc.require_login
+def index():
+    dbh = None
+    try:
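+        # The step*_time stamps bracket each query and each figure build; the
+        # durations are logged and rendered into the summary table below.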
+ stepX_time = time.time()
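+        # Connection parameters come from the PG* environment variables again,
+        # as in ntp_routes.py.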
+ dbh = psycopg.connect()
+ engine = sqlalchemy.create_engine("postgresql+psycopg://", creator=lambda: dbh)
+
+ step0_time = time.time()
+ df = pd.read_sql("SELECT month, cast(year AS varchar), current_energy AS value FROM pv_energy_by_month", con=engine)
+ step1_time = time.time()
+ duration1 = step1_time - step0_time
+ logger.info(f"{duration1=}")
+ fig_1 = px.bar(df, x='month', y='value', color='year', barmode='group')
+ step2_time = time.time()
+ duration2 = step2_time - step1_time
+ logger.info(f"{duration2=}")
+ fig_1.update_layout(
+ title=f"Jahreswerte Exportierte Energie {duration1:.3f}, {duration2:.3f}",
+ xaxis_title="",
+ yaxis_title="",
+ legend_title="Jahr",
+ xaxis=dict(
+ tickmode="array",
+                tickvals=list(range(1, 13)),  # months 1–12
+ ticktext=["Jan", "Feb", "Mär", "Apr", "Mai", "Jun", "Jul", "Aug", "Sep", "Okt", "Nov", "Dez"]
+ ),
+ yaxis=dict(ticksuffix=" kWh")
+ )
+ graph_html_1 = fig_1.to_html(full_html=False, default_height='30%')
+
+ step3_time = time.time()
+ df = pd.read_sql("SELECT time_bucket('5 minutes', time) AS bucket, AVG(power) AS avg_power FROM pv_power_v WHERE time >= date_trunc('day', now()) - '1 day'::interval AND time < date_trunc('day', now()) GROUP BY bucket ORDER BY bucket", con=engine)
+ step4_time = time.time()
+ duration3 = step4_time - step3_time
+ logger.info(f"{duration3=}")
+ fig_2 = px.line(df, x='bucket', y='avg_power')
+ step5_time = time.time()
+ duration4 = step5_time - step4_time
+ logger.info(f"{duration4=}")
+ fig_2.update_layout(
+ xaxis_title="",
+ yaxis_title="",
+ title=f"Export gestern {duration3:.3f}, {duration4:.3f}",
+ yaxis=dict(ticksuffix=" W")
+ )
+ graph_html_2 = fig_2.to_html(full_html=False, default_height='30%')
+
+ step6_time = time.time()
+ df = pd.read_sql("SELECT time_bucket('5 minutes', time) AS bucket, AVG(power) AS avg_power FROM pv_power_v WHERE time >= date_trunc('day', now()) AND time < date_trunc('day', now()) + '1 day'::interval GROUP BY bucket ORDER BY bucket", con=engine)
+ step7_time = time.time()
+ duration5 = step7_time - step6_time
+ logger.info(f"{duration5=}")
+ fig_3 = px.line(df, x='bucket', y='avg_power')
+ step8_time = time.time()
+ duration6 = step8_time - step7_time
+ logger.info(f"{duration6=}")
+ fig_3.update_layout(
+ xaxis_title="",
+ yaxis_title="",
+ title=f"Export heute {duration5:.3f}, {duration6:.3f}",
+ yaxis=dict(ticksuffix=" W")
+ )
+ graph_html_3 = fig_3.to_html(full_html=False, default_height='30%')
+ stepZ_time = time.time()
+ duration7 = stepZ_time - stepX_time
+ logger.info(f"{duration7=}")
+
+        return render_template_string(f"""
+            <!DOCTYPE html>
+            <html>
+              <head>
+                <title>Jahreswerte PV-Energie</title>
+              </head>
+              <body>
+                {graph_html_1}
+                {graph_html_2}
+                {graph_html_3}
+                <table>
+                  <tr><td>Query 1:</td><td>{duration1:.3f} s</td><td>Graph 1:</td><td>{duration2:.3f} s</td></tr>
+                  <tr><td>Query 2:</td><td>{duration3:.3f} s</td><td>Graph 2:</td><td>{duration4:.3f} s</td></tr>
+                  <tr><td>Query 3:</td><td>{duration5:.3f} s</td><td>Graph 3:</td><td>{duration6:.3f} s</td></tr>
+                  <tr><td>Total:</td><td>{duration7:.3f} s</td><td></td><td></td></tr>
+                </table>
+              </body>
+            </html>
+        """)
+ except Exception as e:
+ raise Exception(f"Error when querying energy export values: {e}")
+ finally:
+ if dbh is not None:
+ dbh.close()
+
diff --git a/src/run.py b/src/run.py
index 9b8ad9e..8629a77 100644
--- a/src/run.py
+++ b/src/run.py
@@ -1,261 +1,12 @@
-from flask import Flask, session, g, render_template_string
-from flask_session import Session
-from flask_oidc import OpenIDConnect
from werkzeug.middleware.proxy_fix import ProxyFix
from loguru import logger
-import redis
-import json
-import os
-import plotly.express as px
-import plotly.graph_objects as po
-import pandas as pd
-import psycopg
-import sqlalchemy
-import time
-
-try:
- redis_url = os.environ['REDIS_URL']
- oidc_client_secrets = os.environ['OIDC_CLIENT_SECRETS']
- secret_key = os.environ['SECRET_KEY']
-except KeyError as e:
- logger.error(f"Required environment variable not set ({e})")
- raise e
-
-
-app = Flask(__name__)
-
-app.config.update({
- 'SECRET_KEY': secret_key,
- 'SESSION_TYPE': 'redis',
- 'SESSION_REDIS': redis.from_url(redis_url),
- 'OIDC_CLIENT_SECRETS': json.loads(oidc_client_secrets),
- 'OIDC_SCOPES': 'openid email',
- 'OIDC_USER_INFO_ENABLED': True,
- 'SESSION_USE_SIGNER': True,
-})
-Session(app)
-oidc = OpenIDConnect(app)
-
-@app.route('/token_debug', methods=['GET'])
-@oidc.require_login
-def token_debug():
- # Access Token vom Identity Provider abrufen
- access_token = oidc.get_access_token()
- return json.dumps({
- "access_token": access_token
- })
-
-@app.route('/')
-@oidc.require_login
-def index():
- try:
- stepX_time = time.time()
- dbh = psycopg.connect()
- engine = sqlalchemy.create_engine("postgresql+psycopg://", creator=lambda: dbh)
-
- step0_time = time.time()
- df = pd.read_sql("SELECT month, cast(year AS varchar), current_energy AS value FROM pv_energy_by_month", con=engine)
- step1_time = time.time()
- duration1 = step1_time - step0_time
- logger.info(f"{duration1=}")
- fig_1 = px.bar(df, x='month', y='value', color='year', barmode='group')
- step2_time = time.time()
- duration2 = step2_time - step1_time
- logger.info(f"{duration2=}")
- fig_1.update_layout(
- title=f"Jahreswerte Exportierte Energie {duration1:.3f}, {duration2:.3f}",
- xaxis_title="",
- yaxis_title="",
- legend_title="Jahr",
- xaxis=dict(
- tickmode="array",
- tickvals=list(range(1, 13)), # Monate 1–12
- ticktext=["Jan", "Feb", "Mär", "Apr", "Mai", "Jun", "Jul", "Aug", "Sep", "Okt", "Nov", "Dez"]
- ),
- yaxis=dict(ticksuffix=" kWh")
- )
- graph_html_1 = fig_1.to_html(full_html=False, default_height='30%')
-
- step3_time = time.time()
- df = pd.read_sql("SELECT time_bucket('5 minutes', time) AS bucket, AVG(power) AS avg_power FROM pv_power_v WHERE time >= date_trunc('day', now()) - '1 day'::interval AND time < date_trunc('day', now()) GROUP BY bucket ORDER BY bucket", con=engine)
- step4_time = time.time()
- duration3 = step4_time - step3_time
- logger.info(f"{duration3=}")
- fig_2 = px.line(df, x='bucket', y='avg_power')
- step5_time = time.time()
- duration4 = step5_time - step4_time
- logger.info(f"{duration4=}")
- fig_2.update_layout(
- xaxis_title="",
- yaxis_title="",
- title=f"Export gestern {duration3:.3f}, {duration4:.3f}",
- yaxis=dict(ticksuffix=" W")
- )
- graph_html_2 = fig_2.to_html(full_html=False, default_height='30%')
-
- step6_time = time.time()
- df = pd.read_sql("SELECT time_bucket('5 minutes', time) AS bucket, AVG(power) AS avg_power FROM pv_power_v WHERE time >= date_trunc('day', now()) AND time < date_trunc('day', now()) + '1 day'::interval GROUP BY bucket ORDER BY bucket", con=engine)
- step7_time = time.time()
- duration5 = step7_time - step6_time
- logger.info(f"{duration5=}")
- fig_3 = px.line(df, x='bucket', y='avg_power')
- step8_time = time.time()
- duration6 = step8_time - step7_time
- logger.info(f"{duration6=}")
- fig_3.update_layout(
- xaxis_title="",
- yaxis_title="",
- title=f"Export heute {duration5:.3f}, {duration6:.3f}",
- yaxis=dict(ticksuffix=" W")
- )
- graph_html_3 = fig_3.to_html(full_html=False, default_height='30%')
- stepZ_time = time.time()
- duration7 = stepZ_time - stepX_time
- logger.info(f"{duration7=}")
-
- return render_template_string(f"""
-
-
- Jahreswerte PV-Energie
-
-
- {graph_html_1}
- {graph_html_2}
- {graph_html_3}
-
-
-
-
- Query 1: | {duration1:.3f} s | Graph 1: | {duration2:.3f} s |
-
- Query 2: | {duration3:.3f} s | Graph 2: | {duration4:.3f} s |
-
- Query 3: | {duration5:.3f} s | Graph 3: | {duration6:.3f} s |
-
- Total: | {duration7:.3f} s | | |
-
-
-
-
-
- """)
- except Exception as e:
- raise Exception(f"Error when querying energy export values: {e}")
- finally:
- if dbh is not None:
- dbh.close()
-
-
-@app.route('/ntpserver')
-def ntpserver():
- try:
- dbh = psycopg.connect()
- engine = sqlalchemy.create_engine("postgresql+psycopg://", creator=lambda: dbh)
-
- query = """
- select time_bucket('5 minutes', time) as bucket,
- device,
- avg(cast(values->'rootdisp'->>'value' as float)) as rootdisp,
- max(cast(values->'stratum'->>'value' as int)) as stratum
- from measurements
- where time >= date_trunc('day', now()) AND time < date_trunc('day', now()) + '1 day'::interval and
- application = 'TSM' and attributes->>'Label' = 'david'
- group by bucket, device
- order by bucket, device
- """
-
- df = pd.read_sql(query, con=engine)
-
- fig = po.Figure()
- fig.add_trace(po.Scatter(x=df['bucket'], y=df['rootdisp'], mode='lines', name='Root Dispersion', yaxis='y1', line=dict(color='red')))
- fig.add_trace(po.Scatter(x=df['bucket'], y=df['stratum'], mode='lines', name='Stratum', yaxis='y2', line=dict(color='blue')))
-
- fig.update_layout(
- title='NTP Server Numbers',
-
- # Linke Y-Achse
- yaxis=dict(
- title='Root Dispersion',
- ticksuffix=' ms'
- ),
-
- # Rechte Y-Achse
- yaxis2=dict(
- title='Stratum',
- overlaying='y', # Legt die zweite Y-Achse über die erste
- side='right', # Setzt sie auf die rechte Seite
- tickmode='linear', # Stellt sicher, dass die Ticks in festen Intervallen sind
- dtick=1, # Zeigt nur ganzzahlige Ticks
- ),
-
- legend=dict(x=0.05, y=1) # Position der Legende
- )
-
- graph_html_1 = fig.to_html(full_html=False, default_height='30%')
-
- query = """
- select time_bucket('5 minutes', time) as bucket,
- device,
- avg(cast(values->'time-req-pkts'->>'value' as float)) as packets
- from measurements
- where time >= date_trunc('day', now()) AND time < date_trunc('day', now()) + '1 day'::interval and
- application = 'SNMP' and attributes->>'Label' = 'david'
- group by bucket, device
- order by bucket, device
- """
- df = pd.read_sql(query, con=engine)
- fig_2 = px.line(df, x='bucket', y='packets')
- fig_2.update_layout(
- xaxis_title="",
- yaxis_title="",
- yaxis_ticksuffix="p/s",
- title=f"Time Requests"
- )
- graph_html_2 = fig_2.to_html(full_html=False, default_height='30%')
-
- query = """
- select time_bucket('5 minutes', time) as bucket,
- device,
- avg(cast(values->'load1'->>'value' as float)) as loadaverage1min
- from measurements
- where time >= date_trunc('day', now()) AND time < date_trunc('day', now()) + '1 day'::interval and
- application = 'SNMP' and attributes->>'Label' = 'david'
- group by bucket, device
- order by bucket, device
- """
- df = pd.read_sql(query, con=engine)
- fig_3 = px.line(df, x='bucket', y='loadaverage1min')
- fig_3.update_layout(
- xaxis_title="",
- yaxis_title="",
- title=f"CPU Load"
- )
- graph_html_3 = fig_3.to_html(full_html=False, default_height='30%')
-
- return render_template_string(f"""
-
-
- NTP Server Numbers
-
-
- {graph_html_1}
- {graph_html_2}
- {graph_html_3}
-
-
- """)
- except Exception as e:
- raise Exception(f"Error when querying NTP server values: {e}")
- finally:
- if dbh is not None:
- dbh.close()
-
-
+from app import app
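+# Importing the route modules registers their views on the shared app object.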
+import pv_routes
+import ntp_routes
+import debug_routes
if __name__ == '__main__':