from flask import Flask, session, g, render_template_string
from flask_session import Session
from flask_oidc import OpenIDConnect
from werkzeug.middleware.proxy_fix import ProxyFix
from loguru import logger
import redis
import json
import os
import plotly.express as px
import plotly.graph_objects as po
import pandas as pd
import psycopg
import sqlalchemy
import time

# Fail fast at import time if mandatory configuration is missing.
try:
    redis_url = os.environ['REDIS_URL']
    oidc_client_secrets = os.environ['OIDC_CLIENT_SECRETS']
    secret_key = os.environ['SECRET_KEY']
except KeyError as e:
    logger.error(f"Required environment variable not set ({e})")
    # Bare raise preserves the original traceback (was `raise e`).
    raise

app = Flask(__name__)
app.config.update({
    'SECRET_KEY': secret_key,
    'SESSION_TYPE': 'redis',
    'SESSION_REDIS': redis.from_url(redis_url),
    # OIDC_CLIENT_SECRETS is expected to hold the client-secrets JSON inline.
    'OIDC_CLIENT_SECRETS': json.loads(oidc_client_secrets),
    'OIDC_SCOPES': 'openid email',
    'OIDC_USER_INFO_ENABLED': True,
    'SESSION_USE_SIGNER': True,
})
Session(app)
oidc = OpenIDConnect(app)


@app.route('/token_debug', methods=['GET'])
@oidc.require_login
def token_debug():
    """Return the current user's OIDC access token as JSON.

    NOTE(security): this exposes the raw access token to the browser.
    Keep this endpoint disabled or admin-only outside of debugging.
    """
    # Fetch the access token from the identity provider session.
    access_token = oidc.get_access_token()
    return json.dumps({
        "access_token": access_token
    })


@app.route('/')
@oidc.require_login
def index():
    """Render the PV-energy dashboard: monthly export totals plus
    5-minute average power curves for yesterday and today.

    Raises:
        Exception: wraps any query/plotting failure with context
        (original exception chained via ``from``).
    """
    dbh = None  # ensure the name exists for the finally clause even if connect() fails
    try:
        stepX_time = time.time()
        # psycopg.connect() reads its settings from the libpq environment
        # (PGHOST, PGDATABASE, ...). The SQLAlchemy engine reuses this single
        # connection via the creator callback, so closing dbh is sufficient cleanup.
        dbh = psycopg.connect()
        engine = sqlalchemy.create_engine("postgresql+psycopg://", creator=lambda: dbh)

        # --- Chart 1: exported energy per month, grouped by year ---
        step0_time = time.time()
        df = pd.read_sql("SELECT month, cast(year AS varchar), current_energy AS value FROM pv_energy_by_month", con=engine)
        step1_time = time.time()
        duration1 = step1_time - step0_time
        logger.info(f"{duration1=}")
        fig_1 = px.bar(df, x='month', y='value', color='year', barmode='group')
        step2_time = time.time()
        duration2 = step2_time - step1_time
        logger.info(f"{duration2=}")
        fig_1.update_layout(
            title=f"Jahreswerte Exportierte Energie {duration1:.3f}, {duration2:.3f}",
            xaxis_title="",
            yaxis_title="",
            legend_title="Jahr",
            xaxis=dict(
                tickmode="array",
                tickvals=list(range(1, 13)),  # months 1-12
                ticktext=["Jan", "Feb", "Mär", "Apr", "Mai", "Jun", "Jul",
                          "Aug", "Sep", "Okt", "Nov", "Dez"]
            ),
            yaxis=dict(ticksuffix=" kWh")
        )
        graph_html_1 = fig_1.to_html(full_html=False, default_height='30%')

        # --- Chart 2: 5-minute average power for yesterday ---
        step3_time = time.time()
        df = pd.read_sql("SELECT time_bucket('5 minutes', time) AS bucket, AVG(power) AS avg_power FROM pv_power_v WHERE time >= date_trunc('day', now()) - '1 day'::interval AND time < date_trunc('day', now()) GROUP BY bucket ORDER BY bucket", con=engine)
        step4_time = time.time()
        duration3 = step4_time - step3_time
        logger.info(f"{duration3=}")
        fig_2 = px.line(df, x='bucket', y='avg_power')
        step5_time = time.time()
        duration4 = step5_time - step4_time
        logger.info(f"{duration4=}")
        fig_2.update_layout(
            xaxis_title="",
            yaxis_title="",
            title=f"Export gestern {duration3:.3f}, {duration4:.3f}",
            yaxis=dict(ticksuffix=" W")
        )
        graph_html_2 = fig_2.to_html(full_html=False, default_height='30%')

        # --- Chart 3: 5-minute average power for today ---
        step6_time = time.time()
        df = pd.read_sql("SELECT time_bucket('5 minutes', time) AS bucket, AVG(power) AS avg_power FROM pv_power_v WHERE time >= date_trunc('day', now()) AND time < date_trunc('day', now()) + '1 day'::interval GROUP BY bucket ORDER BY bucket", con=engine)
        step7_time = time.time()
        duration5 = step7_time - step6_time
        logger.info(f"{duration5=}")
        fig_3 = px.line(df, x='bucket', y='avg_power')
        step8_time = time.time()
        duration6 = step8_time - step7_time
        logger.info(f"{duration6=}")
        fig_3.update_layout(
            xaxis_title="",
            yaxis_title="",
            title=f"Export heute {duration5:.3f}, {duration6:.3f}",
            yaxis=dict(ticksuffix=" W")
        )
        graph_html_3 = fig_3.to_html(full_html=False, default_height='30%')

        stepZ_time = time.time()
        duration7 = stepZ_time - stepX_time
        logger.info(f"{duration7=}")
        return render_template_string(f""" Jahreswerte PV-Energie {graph_html_1} {graph_html_2} {graph_html_3}
Query 1: {duration1:.3f} sGraph 1: {duration2:.3f} s
Query 2: {duration3:.3f} sGraph 2: {duration4:.3f} s
Query 3: {duration5:.3f} sGraph 3: {duration6:.3f} s
Total: {duration7:.3f} s
""")
    except Exception as e:
        # Chain the original exception so the full traceback is preserved.
        raise Exception(f"Error when querying energy export values: {e}") from e
    finally:
        if dbh is not None:
            dbh.close()


@app.route('/ntpserver')
def ntpserver():
    """Render NTP server metrics (root dispersion and stratum) for today
    as a dual-axis Plotly chart.

    Raises:
        Exception: wraps any query/plotting failure with context
        (original exception chained via ``from``).
    """
    dbh = None  # ensure the name exists for the finally clause even if connect() fails
    try:
        dbh = psycopg.connect()
        engine = sqlalchemy.create_engine("postgresql+psycopg://", creator=lambda: dbh)
        query = """ select time_bucket('5 minutes', time) as bucket, device, avg(cast(values->'rootdisp'->>'value' as float)) as rootdisp, avg(cast(values->'stratum'->>'value' as int)) as stratum from measurements where time >= date_trunc('day', now()) AND time < date_trunc('day', now()) + '1 day'::interval and application = 'TSM' and attributes->>'Label' = 'david' group by bucket, device order by bucket, device """
        df = pd.read_sql(query, con=engine)

        fig = po.Figure()
        fig.add_trace(po.Scatter(x=df['bucket'], y=df['rootdisp'], mode='lines', name='Root Dispersion', yaxis='y1', line=dict(color='red')))
        fig.add_trace(po.Scatter(x=df['bucket'], y=df['stratum'], mode='lines', name='Stratum', yaxis='y2', line=dict(color='blue')))
        fig.update_layout(
            title='NTP Server Numbers',
            # Left y-axis
            yaxis=dict(
                title='Root Dispersion'
            ),
            # Right y-axis
            yaxis2=dict(
                title='Stratum',
                overlaying='y',   # overlay the second y-axis on top of the first
                side='right',     # place it on the right-hand side
                tickmode='linear',  # ticks at fixed intervals
                dtick=1,          # integer ticks only
            ),
            legend=dict(x=0.05, y=1)  # legend position
        )
        graph_html = fig.to_html(full_html=False, default_height='30%')
        return render_template_string(f""" NTP Server Numbers {graph_html} """)
    except Exception as e:
        # Chain the original exception so the full traceback is preserved.
        raise Exception(f"Error when querying NTP server values: {e}") from e
    finally:
        if dbh is not None:
            dbh.close()


if __name__ == '__main__':
    # Local development server.
    app.run(port=8080)
else:
    # Behind a reverse proxy (WSGI deployment): trust one hop of
    # X-Forwarded-For / X-Forwarded-Host headers.
    exposed_app = ProxyFix(app, x_for=1, x_host=1)