Compare commits

23 Commits

SHA1
---
3d05ea5d28
52217c5251
022d7a8926
698f926376
f74ad50b94
0c03d9f94e
eca5affd53
6236673d28
4213dc7329
2d3eab0db8
73b55b05c4
aa74c02498
6fd2bd0863
2c78fba3a6
86b883569f
ba86a08632
0b61a18eb1
1418603007
9926c89ef2
fc6f407a52
0e9cb0a7f8
e3b2ea704d
8bd4a4b695
@@ -13,12 +13,6 @@ steps:
       dockerfile: Dockerfile
     when:
       - event: [push, tag]
-  scan_image:
-    image: aquasec/trivy
-    commands:
-      - TRIVY_DISABLE_VEX_NOTICE=1 trivy image $FORGE_NAME/$CI_REPO:$CI_COMMIT_SHA --quiet --exit-code 1
-    when:
-      - event: [push, tag]
   deploy:
     image: quay.io/wollud1969/k8s-admin-helper:0.2.1
     environment:
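The removed scan_image step ran Trivy against the freshly built image and failed the pipeline on findings (--exit-code 1). For reference, a minimal local equivalent in Python, assuming Trivy is installed; the image reference below is a placeholder, not a value taken from this repository:

```python
# Hypothetical local stand-in for the removed scan_image CI step.
import os
import subprocess

def scan_image(image_ref: str) -> None:
    # --exit-code 1 makes trivy exit non-zero on findings; check=True turns
    # that into a CalledProcessError, mirroring a failing pipeline step.
    env = dict(os.environ, TRIVY_DISABLE_VEX_NOTICE="1")
    subprocess.run(
        ["trivy", "image", image_ref, "--quiet", "--exit-code", "1"],
        check=True,
        env=env,
    )

if __name__ == "__main__":
    scan_image("registry.example.com/owner/repo:tag")  # placeholder reference
```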
@@ -16,6 +16,7 @@ idna==3.10
 importlib_metadata==8.6.1
 itsdangerous==2.2.0
 Jinja2==3.1.5
+kaleido==1.0.0rc0
 loguru==0.7.3
 MarkupSafe==3.0.2
 msgspec==0.19.0
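kaleido is the engine plotly uses for static image export; it is what makes the fig.to_image(format='png') call introduced below in src/run.py work. A minimal sketch of that export path (the figure data and output path are illustrative):

```python
# Minimal static-export sketch: plotly renders the PNG through kaleido.
import plotly.express as px

fig = px.line(x=[0, 1, 2], y=[3, 1, 2])
png_bytes = fig.to_image(format="png")  # requires the kaleido package
with open("example.png", "wb") as f:    # illustrative output path
    f.write(png_bytes)
```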
src/run.py (119 changed lines)

@@ -1,12 +1,14 @@
-from flask import Flask, session, g, render_template_string
+from flask import Flask, session, g, render_template_string, Response
 from flask_session import Session
 from flask_oidc import OpenIDConnect
+import kaleido
 from werkzeug.middleware.proxy_fix import ProxyFix
 from loguru import logger
 import redis
 import json
 import os
 import plotly.express as px
+import plotly.graph_objects as po
 import pandas as pd
 import psycopg
 import sqlalchemy
@@ -48,16 +50,19 @@ def token_debug():
 @oidc.require_login
 def index():
     try:
+        stepX_time = time.time()
         dbh = psycopg.connect()
         engine = sqlalchemy.create_engine("postgresql+psycopg://", creator=lambda: dbh)
 
-        start_time = time.time()
+        step0_time = time.time()
         df = pd.read_sql("SELECT month, cast(year AS varchar), current_energy AS value FROM pv_energy_by_month", con=engine)
         step1_time = time.time()
-        duration1 = step1_time - start_time
+        duration1 = step1_time - step0_time
+        logger.info(f"{duration1=}")
         fig_1 = px.bar(df, x='month', y='value', color='year', barmode='group')
         step2_time = time.time()
         duration2 = step2_time - step1_time
+        logger.info(f"{duration2=}")
         fig_1.update_layout(
             title=f"Jahreswerte Exportierte Energie {duration1:.3f}, {duration2:.3f}",
             xaxis_title="",
@@ -70,37 +75,44 @@ def index():
             ),
             yaxis=dict(ticksuffix=" kWh")
         )
-        graph_html_1 = fig_1.to_html(full_html=False, default_height='33%')
+        graph_html_1 = fig_1.to_html(full_html=False, default_height='30%')
 
-        start_time = time.time()
+        step3_time = time.time()
         df = pd.read_sql("SELECT time_bucket('5 minutes', time) AS bucket, AVG(power) AS avg_power FROM pv_power_v WHERE time >= date_trunc('day', now()) - '1 day'::interval AND time < date_trunc('day', now()) GROUP BY bucket ORDER BY bucket", con=engine)
-        step1_time = time.time()
-        duration1 = step1_time - start_time
+        step4_time = time.time()
+        duration3 = step4_time - step3_time
+        logger.info(f"{duration3=}")
         fig_2 = px.line(df, x='bucket', y='avg_power')
-        step2_time = time.time()
-        duration2 = step2_time - step1_time
+        step5_time = time.time()
+        duration4 = step5_time - step4_time
+        logger.info(f"{duration4=}")
         fig_2.update_layout(
             xaxis_title="",
             yaxis_title="",
-            title=f"Export gestern {duration1:.3f}, {duration2:.3f}",
+            title=f"Export gestern {duration3:.3f}, {duration4:.3f}",
             yaxis=dict(ticksuffix=" W")
         )
-        graph_html_2 = fig_2.to_html(full_html=False, default_height='33%')
+        graph_html_2 = fig_2.to_html(full_html=False, default_height='30%')
 
-        start_time = time.time()
+        step6_time = time.time()
         df = pd.read_sql("SELECT time_bucket('5 minutes', time) AS bucket, AVG(power) AS avg_power FROM pv_power_v WHERE time >= date_trunc('day', now()) AND time < date_trunc('day', now()) + '1 day'::interval GROUP BY bucket ORDER BY bucket", con=engine)
-        step1_time = time.time()
-        duration1 = step1_time - start_time
+        step7_time = time.time()
+        duration5 = step7_time - step6_time
+        logger.info(f"{duration5=}")
         fig_3 = px.line(df, x='bucket', y='avg_power')
-        step2_time = time.time()
-        duration2 = step2_time - step1_time
+        step8_time = time.time()
+        duration6 = step8_time - step7_time
+        logger.info(f"{duration6=}")
         fig_3.update_layout(
             xaxis_title="",
             yaxis_title="",
-            title=f"Export heute {duration1:.3f}, {duration2:.3f}",
+            title=f"Export heute {duration5:.3f}, {duration6:.3f}",
             yaxis=dict(ticksuffix=" W")
         )
-        graph_html_3 = fig_3.to_html(full_html=False, default_height='33%')
+        graph_html_3 = fig_3.to_html(full_html=False, default_height='30%')
+        stepZ_time = time.time()
+        duration7 = stepZ_time - stepX_time
+        logger.info(f"{duration7=}")
 
         return render_template_string(f"""
         <html>
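The hunks above repeat the same measure-and-log pattern (take a timestamp, run a step, log the difference). A possible refactor, not part of this change, is a small context manager; the name `timed` is illustrative:

```python
# Sketch of a reusable step timer with loguru, equivalent in spirit to the
# step*_time / duration* bookkeeping above.
import time
from contextlib import contextmanager

from loguru import logger

@contextmanager
def timed(label: str):
    start = time.time()
    try:
        yield
    finally:
        logger.info(f"{label}: {time.time() - start:.3f}s")

# Usage sketch:
with timed("query 1"):
    time.sleep(0.1)  # stands in for pd.read_sql(...)
```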
@@ -111,6 +123,23 @@ def index():
         {graph_html_1}
         {graph_html_2}
         {graph_html_3}
+        <div style="height:9vh; background-color:lightgrey; font-family: Courier, Consolas, monospace;">
+        <table style="border-collapse: collapse;">
+        <style>
+        td.smallsep {{ padding-right: 10px }}
+        td.largesep {{ padding-right: 30px }}
+        </style>
+        <tr>
+        <td class="smallsep">Query 1:</td><td class="largesep"> {duration1:.3f} s</td><td class="smallsep">Graph 1:</td><td> {duration2:.3f} s</td>
+        </tr><tr>
+        <td class="smallsep">Query 2:</td><td class="largesep"> {duration3:.3f} s</td><td class="smallsep">Graph 2:</td><td> {duration4:.3f} s</td>
+        </tr><tr>
+        <td class="smallsep">Query 3:</td><td class="largesep"> {duration5:.3f} s</td><td class="smallsep">Graph 3:</td><td> {duration6:.3f} s</td>
+        </tr><tr>
+        <td class="smallsep">Total:</td><td> {duration7:.3f} s</td><td></td><td></td>
+        </tr>
+        </table>
+        </div>
         </body>
         </html>
         """)
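The doubled braces in the CSS rules above are required because the template is a Python f-string: `{{` and `}}` emit literal braces, while single braces interpolate values such as `{duration1:.3f}`. A tiny illustration:

```python
duration1 = 0.123
print(f"td.smallsep {{ padding-right: 10px }} -> {duration1:.3f}")
# prints: td.smallsep { padding-right: 10px } -> 0.123
```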
||||||
@ -121,6 +150,60 @@ def index():
|
|||||||
dbh.close()
|
dbh.close()
|
||||||
|
|
||||||
|
|
||||||
|
@app.route('/ntpserver')
|
||||||
|
def ntpserver():
|
||||||
|
try:
|
||||||
|
dbh = psycopg.connect()
|
||||||
|
engine = sqlalchemy.create_engine("postgresql+psycopg://", creator=lambda: dbh)
|
||||||
|
|
||||||
|
query = """
|
||||||
|
select time_bucket('5 minutes', time) as bucket,
|
||||||
|
device,
|
||||||
|
avg(cast(values->'rootdisp'->>'value' as float)) as rootdisp,
|
||||||
|
avg(cast(values->'stratum'->>'value' as int)) as stratum
|
||||||
|
from measurements
|
||||||
|
where time >= date_trunc('day', now()) AND time < date_trunc('day', now()) + '1 day'::interval and
|
||||||
|
application = 'TSM' and attributes->>'Label' = 'david'
|
||||||
|
group by bucket, device
|
||||||
|
order by bucket, device
|
||||||
|
"""
|
||||||
|
|
||||||
|
df = pd.read_sql(query, con=engine)
|
||||||
|
|
||||||
|
fig = po.Figure()
|
||||||
|
fig.add_trace(po.Scatter(x=df['bucket'], y=df['rootdisp'], mode='lines', name='Root Dispersion', yaxis='y1', line=dict(color='red')))
|
||||||
|
fig.add_trace(po.Scatter(x=df['bucket'], y=df['stratum'], mode='lines', name='Stratum', yaxis='y2', line=dict(color='blue')))
|
||||||
|
|
||||||
|
fig.update_layout(
|
||||||
|
# Linke Y-Achse
|
||||||
|
yaxis=dict(
|
||||||
|
title='Root Dispersion'
|
||||||
|
),
|
||||||
|
|
||||||
|
# Rechte Y-Achse
|
||||||
|
yaxis2=dict(
|
||||||
|
title='Stratum',
|
||||||
|
overlaying='y', # Legt die zweite Y-Achse über die erste
|
||||||
|
side='right', # Setzt sie auf die rechte Seite
|
||||||
|
tickmode='linear', # Stellt sicher, dass die Ticks in festen Intervallen sind
|
||||||
|
dtick=1, # Zeigt nur ganzzahlige Ticks
|
||||||
|
),
|
||||||
|
|
||||||
|
legend=dict(x=0.05, y=1) # Position der Legende
|
||||||
|
)
|
||||||
|
|
||||||
|
img = fig.to_image(format='png')
|
||||||
|
return Response(img, mimetype='image/png')
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
raise Exception(f"Error when querying NTP server values: {e}")
|
||||||
|
finally:
|
||||||
|
if dbh is not None:
|
||||||
|
dbh.close()
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
|
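One way to exercise the new /ntpserver route without a browser is Flask's test client; a minimal sketch, assuming `app` can be imported from src/run.py and its OIDC and database configuration is in place (the output path is illustrative):

```python
# Fetch the rendered PNG from the new route and write it to disk.
from run import app  # assumes src/ is on the import path

with app.test_client() as client:
    resp = client.get("/ntpserver")
    assert resp.status_code == 200 and resp.mimetype == "image/png"
    with open("ntpserver.png", "wb") as f:  # illustrative output path
        f.write(resp.data)
```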