Compare commits

43 Commits
0.0.6 ... 0.1.9

SHA1        Date                        CI (push / tag)     Message
(- = no pipeline status shown)
a42254cf95  2025-02-11 21:21:14 +01:00  success / success   import kaleido, 2
3d05ea5d28  2025-02-11 19:58:08 +01:00  success / success   import kaleido
52217c5251  2025-02-11 19:53:48 +01:00  success / success   still kaleido
022d7a8926  2025-02-11 19:52:27 +01:00  failed / -          merged
698f926376  2025-02-11 19:52:10 +01:00  - / -               Merge branch 'main' of gitea.hottis.de:wn/pv-stats
f74ad50b94  2025-02-11 19:52:04 +01:00  - / -               kaleido
0c03d9f94e  2025-02-11 19:49:49 +01:00  failed / failed     add kaleido, 3
eca5affd53  2025-02-11 19:48:09 +01:00  failed / failed     add kaleido, 2
6236673d28  2025-02-11 19:47:07 +01:00  failed / failed     add kaleido
4213dc7329  2025-02-11 19:42:19 +01:00  success / success   ntp as png
2d3eab0db8  2025-02-11 16:59:58 +01:00  success / success   disable trivy for the moment
73b55b05c4  2025-02-11 16:52:51 +01:00  failed / failed     ntp server numbers
aa74c02498  2025-01-31 10:29:42 +01:00  success / success   timing
6fd2bd0863  2025-01-31 10:19:03 +01:00  success / success   timing
2c78fba3a6  2025-01-30 17:37:55 +01:00  success / success   timing
86b883569f  2025-01-30 17:24:18 +01:00  failed / success    timing
ba86a08632  2025-01-30 17:16:08 +01:00  success / success   timing
0b61a18eb1  2025-01-30 17:10:01 +01:00  success / success   timing
1418603007  2025-01-30 16:59:36 +01:00  success / success   timing
9926c89ef2  2025-01-30 16:53:07 +01:00  success / success   timing
fc6f407a52  2025-01-30 16:42:09 +01:00  success / success   timing
0e9cb0a7f8  2025-01-30 14:16:01 +01:00  success / success   timing
e3b2ea704d  2025-01-30 13:35:08 +01:00  success / success   timing
8bd4a4b695  2025-01-30 13:30:45 +01:00  success / success   timing
89f3cbb5d1  2025-01-30 10:31:36 +01:00  success / success   timing
37c4a373b7  2025-01-30 10:25:56 +01:00  success / success   timing
408cff442c  2025-01-30 10:19:07 +01:00  success / success   timing
67b88aa2a1  2025-01-30 10:03:16 +01:00  failed / success    timing
7b0238b4a5  2025-01-29 17:28:12 +01:00  success / success   use new decrypt approach
16771227bb  2025-01-29 15:40:34 +01:00  failed / success    adjust configuration
aa97e3cdd3  2025-01-29 15:27:35 +01:00  success / success   adjust configuration
2c93c7ec47  2025-01-28 21:57:23 +01:00  success / success   token debug
e0b1c469d2  2025-01-28 21:54:13 +01:00  failed / success    token debug
d646090802  2025-01-28 21:49:27 +01:00  success / success   token debug
c4fd8b2cfd  2025-01-28 14:51:56 +01:00  success / success   fix
37ce3d47ca  2025-01-28 14:41:18 +01:00  success / success   fix
988f24994b  2025-01-28 14:28:24 +01:00  success / success   fix
b8dd70e5ae  2025-01-28 14:20:09 +01:00  success / success   fix
6ba9352edc  2025-01-28 14:13:30 +01:00  success / success   second graph
aaa23a9839  2025-01-28 13:54:11 +01:00  success / success   only one graph
812989df47  2025-01-28 13:49:26 +01:00  success / success   two graphs
452161ff03  2025-01-28 13:47:24 +01:00  success / -         disable trivy warning
cd3bf25fa1  2025-01-28 13:44:03 +01:00  success / success   two graphs
6 changed files with 161 additions and 38 deletions

View File

@@ -13,14 +13,8 @@ steps:
       dockerfile: Dockerfile
     when:
       - event: [push, tag]
-  scan_image:
-    image: aquasec/trivy
-    commands:
-      - trivy image $FORGE_NAME/$CI_REPO:$CI_COMMIT_SHA --quiet --exit-code 1
-    when:
-      - event: [push, tag]
   deploy:
-    image: quay.io/wollud1969/k8s-admin-helper:0.1.3
+    image: quay.io/wollud1969/k8s-admin-helper:0.2.1
    environment:
      KUBE_CONFIG_CONTENT:
        from_secret: kube_config
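
Note: the removed scan_image step matches the "disable trivy for the moment" commit above, and the deploy step is bumped from k8s-admin-helper:0.1.3 to 0.2.1, which presumably ships the /usr/local/bin/decrypt-secrets.sh helper that the deploy script further down starts to use.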

View File

@@ -4,6 +4,8 @@ metadata:
   name: pv-stats
   labels:
     app: pv-stats
+  annotations:
+    secret.reloader.stakater.com/reload: pv-stats
 spec:
   replicas: 1
   selector:
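
Note: secret.reloader.stakater.com/reload is a Stakater Reloader annotation; assuming Reloader is deployed in the cluster, it restarts the pv-stats Deployment whenever the referenced pv-stats Secret changes, for example after the deploy script below recreates it.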

View File

@@ -19,10 +19,11 @@ kubectl create namespace $NAMESPACE \
   -o yaml | \
   kubectl -f - apply
 
-SECRETS_FILE=`mktemp`
-gpg --decrypt --passphrase $GPG_PASSPHRASE --yes --batch --homedir /tmp/.gnupg --output $SECRETS_FILE secrets.asc
-. $SECRETS_FILE
-rm $SECRETS_FILE
+# SECRETS_FILE=`mktemp`
+# gpg --decrypt --passphrase $GPG_PASSPHRASE --yes --batch --homedir /tmp/.gnupg --output $SECRETS_FILE secrets.asc
+# . $SECRETS_FILE
+# rm $SECRETS_FILE
+eval "`cat secrets.asc | /usr/local/bin/decrypt-secrets.sh`"
 
 kubectl create secret generic pv-stats \
   --dry-run=client \
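
Note: this hunk is the "use new decrypt approach" change: the inline gpg --decrypt of secrets.asc is kept only as comments, and the secrets are instead loaded by eval'ing the output of decrypt-secrets.sh, which presumably prints the same variable assignments that the sourced temp file used to provide.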

View File

@@ -1,15 +1,15 @@
 -----BEGIN PGP MESSAGE-----
 
-jA0ECQMCjSYtdE7M+rT/0sEmAbK6tjOB7cduweJyS3sULIl017BrOM1HGV1v4QkN
-NWtceKHpJEFmk1ZFh1EmyS724bDxtVLm8qJN4VzV+cLPa8/IiTzOHJEg4PDRcc0U
-LRCZOb5PtfKWBcQdRshT3JhIxxOKG53ZKOHJ2VodZ/iqmpSbjZ39GtzLJoXlXwCJ
-jHAHgADdvq5J7joSaozjxVCXQI2nOmlEcZosZVd6LZDvemWtmci4ARJtOEqad4Wi
-mbleupBF18pA7b63hfmHocO89b9pTpUZWw10b0SHrUiKZG6kHeQXE8GGu+4HiRcG
-xJN/yYe2Ly4tHwT54gQ7ytLrxOnJUQKelSpq370t/lrGTf8L4b2eq+tED8/7z5Ho
-E12VDreYLFaS2IeJCFBefiGGbKgttPcHZvDCPCUpCpfSShlqtTaidwsTq2lPFGyo
-0Lz9NHsqp4/U+4nabCWNVWhnZhwqIPYOt/kKsYvjq40U29q7RORnxz8l/Ym4vB1P
-kLeouElJvAz6vt/1+nFQJb65YykTqKQpbKgbziFiUFwb37QzBqRO7tuv3/MUzJ1Q
-KB7+IOQ43vFbAK6DZAhJZtAspVYiZ95niVxepaghzf33/FquhtZ+XpFEBDv525z7
-pML7BxFxuwF5q7HBTPtc4IeJBgGi16wm
-=K6Qq
+jA0ECQMC4tvOFCp2PT7/0sE+AcZmiGwroHYdWW/vJA1sCoMrM2oW8xUc1KndDGto
+dFev2KcoZ4FRL9liCrJ7on773bZFCTDu2xiBNMeKF1p8drub1Ej5l61Oq15mLTYf
+XjXknFiWWq4PTzhhy53zvDyIV5yIcGfjotpGC83/qH5CBWIcCw9PZHI5+uHRVRKL
+OpdTpekJ8ljPAGQ4F3vbOeBbG1PMoclx8r5SpKxLwQco2iaXd71dXHtPkEnLujd4
+ZRCthVNVfxrIIRcTJfgxewz2oJWYF9008nmtxpKzqfBtNpFIdBtnTTVvv2lfcVuS
+S4eav9ljPPd/exZaT7fOGD/kuCdiiu6e0yGLzo2ykf0uBy7hG7ZJg8TH5e+LMBm9
+Q1OFD+5LWeEsOEdSTlT5UbV5EaVEiawKWQn7rMZOyvBNiPwLaOlquHirWoll5eIm
+OHgBVN0FiY/righpIoei/KP705FIg+hrpMUvc29PahiL8dgsxJycnKIo4t+2/nac
+H144hP/rqBeaobG05TkZIr+Cpt4rpwwfNWOHgmNFHVpxdmPfoeAmpT4nz760hTEN
+2ol4Qh8xL7n4GFiCpNg8tNZiZkPPa/aRUAFxgdq5beossvKukxCexQmlCFvxITSG
+x9RwssFMnT+wqTuzBN8neBeEF1d9AtAFQKPtg3wkMUyJAlYkxiGS/2NJuYDVpmEQ
+=Wr4f
 -----END PGP MESSAGE-----

View File

@@ -16,6 +16,7 @@ idna==3.10
 importlib_metadata==8.6.1
 itsdangerous==2.2.0
 Jinja2==3.1.5
+kaleido==1.0.0rc0
 loguru==0.7.3
 MarkupSafe==3.0.2
 msgspec==0.19.0
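
This hunk pins a kaleido pre-release, which Plotly uses to render figures to static images; the new /ntpserver route in the application diff below returns a PNG produced this way. A minimal sketch of the mechanism, not taken from the repository and assuming only plotly and kaleido are installed:

import plotly.express as px

# Any Plotly figure works; the app builds its figures from SQL query results instead.
fig = px.line(x=[0, 1, 2], y=[2, 1, 3])

# to_image() renders the figure via the kaleido engine and returns raw PNG bytes,
# which can be written to a file or wrapped in a Flask Response as /ntpserver does.
png_bytes = fig.to_image(format="png")
with open("example.png", "wb") as f:
    f.write(png_bytes)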

View File

@@ -1,15 +1,20 @@
-from flask import Flask, session, g, render_template_string
+from flask import Flask, session, g, render_template_string, Response
 from flask_session import Session
 from flask_oidc import OpenIDConnect
+import kaleido
 from werkzeug.middleware.proxy_fix import ProxyFix
 from loguru import logger
 import redis
 import json
 import os
 import plotly.express as px
+import plotly.graph_objects as po
+import plotly.io as pio
+import io
 import pandas as pd
 import psycopg
 import sqlalchemy
+import time
 
 try:
     redis_url = os.environ['REDIS_URL']
@@ -34,25 +39,35 @@ app.config.update({
 Session(app)
 oidc = OpenIDConnect(app)
 
+@app.route('/token_debug', methods=['GET'])
+@oidc.require_login
+def token_debug():
+    # Access Token vom Identity Provider abrufen
+    access_token = oidc.get_access_token()
+    return json.dumps({
+        "access_token": access_token
+    })
-@app.route('/')
-@oidc.require_login
-def index():
-    counter = int(session.get('counter', '0'))
-    counter += 1
-    session['counter'] = f"{counter}"
-    return f"Hello, Flask! Called for the {counter}. time."
 
 @app.route('/plot')
 @oidc.require_login
 def plot():
     try:
+        stepX_time = time.time()
+
         dbh = psycopg.connect()
         engine = sqlalchemy.create_engine("postgresql+psycopg://", creator=lambda: dbh)
+        step0_time = time.time()
         df = pd.read_sql("SELECT month, cast(year AS varchar), current_energy AS value FROM pv_energy_by_month", con=engine)
-        fig = px.bar(df, x='month', y='value', color='year', barmode='group')
-        fig.update_layout(
-            title="Jahreswerte Exportierte Energie",
-            xaxis_title="Monat",
+        step1_time = time.time()
+        duration1 = step1_time - step0_time
+        logger.info(f"{duration1=}")
+        fig_1 = px.bar(df, x='month', y='value', color='year', barmode='group')
+        step2_time = time.time()
+        duration2 = step2_time - step1_time
+        logger.info(f"{duration2=}")
+        fig_1.update_layout(
+            title=f"Jahreswerte Exportierte Energie {duration1:.3f}, {duration2:.3f}",
+            xaxis_title="",
             yaxis_title="",
             legend_title="Jahr",
             xaxis=dict(
@@ -62,7 +77,44 @@ def plot():
             ),
             yaxis=dict(ticksuffix=" kWh")
         )
-        graph_html = fig.to_html(full_html=False)
+        graph_html_1 = fig_1.to_html(full_html=False, default_height='30%')
+
+        step3_time = time.time()
+        df = pd.read_sql("SELECT time_bucket('5 minutes', time) AS bucket, AVG(power) AS avg_power FROM pv_power_v WHERE time >= date_trunc('day', now()) - '1 day'::interval AND time < date_trunc('day', now()) GROUP BY bucket ORDER BY bucket", con=engine)
+        step4_time = time.time()
+        duration3 = step4_time - step3_time
+        logger.info(f"{duration3=}")
+        fig_2 = px.line(df, x='bucket', y='avg_power')
+        step5_time = time.time()
+        duration4 = step5_time - step4_time
+        logger.info(f"{duration4=}")
+        fig_2.update_layout(
+            xaxis_title="",
+            yaxis_title="",
+            title=f"Export gestern {duration3:.3f}, {duration4:.3f}",
+            yaxis=dict(ticksuffix=" W")
+        )
+        graph_html_2 = fig_2.to_html(full_html=False, default_height='30%')
+
+        step6_time = time.time()
+        df = pd.read_sql("SELECT time_bucket('5 minutes', time) AS bucket, AVG(power) AS avg_power FROM pv_power_v WHERE time >= date_trunc('day', now()) AND time < date_trunc('day', now()) + '1 day'::interval GROUP BY bucket ORDER BY bucket", con=engine)
+        step7_time = time.time()
+        duration5 = step7_time - step6_time
+        logger.info(f"{duration5=}")
+        fig_3 = px.line(df, x='bucket', y='avg_power')
+        step8_time = time.time()
+        duration6 = step8_time - step7_time
+        logger.info(f"{duration6=}")
+        fig_3.update_layout(
+            xaxis_title="",
+            yaxis_title="",
+            title=f"Export heute {duration5:.3f}, {duration6:.3f}",
+            yaxis=dict(ticksuffix=" W")
+        )
+        graph_html_3 = fig_3.to_html(full_html=False, default_height='30%')
+        stepZ_time = time.time()
+        duration7 = stepZ_time - stepX_time
+        logger.info(f"{duration7=}")
 
         return render_template_string(f"""
             <html>
@@ -70,7 +122,26 @@ def plot():
                     <title>Jahreswerte PV-Energie</title>
                 </head>
                 <body>
-                    {graph_html}
+                    {graph_html_1}
+                    {graph_html_2}
+                    {graph_html_3}
+                    <div style="height:9vh; background-color:lightgrey; font-family: Courier, Consolas, monospace;">
+                        <table style="border-collapse: collapse;">
+                            <style>
+                                td.smallsep {{ padding-right: 10px }}
+                                td.largesep {{ padding-right: 30px }}
+                            </style>
+                            <tr>
+                                <td class="smallsep">Query 1:</td><td class="largesep"> {duration1:.3f} s</td><td class="smallsep">Graph 1:</td><td> {duration2:.3f} s</td>
+                            </tr><tr>
+                                <td class="smallsep">Query 2:</td><td class="largesep"> {duration3:.3f} s</td><td class="smallsep">Graph 2:</td><td> {duration4:.3f} s</td>
+                            </tr><tr>
+                                <td class="smallsep">Query 3:</td><td class="largesep"> {duration5:.3f} s</td><td class="smallsep">Graph 3:</td><td> {duration6:.3f} s</td>
+                            </tr><tr>
+                                <td class="smallsep">Total:</td><td> {duration7:.3f} s</td><td></td><td></td>
+                            </tr>
+                        </table>
+                    </div>
                 </body>
            </html>
        """)
@@ -81,6 +152,60 @@ def plot():
             dbh.close()
 
+@app.route('/ntpserver')
+def ntpserver():
+    try:
+        dbh = psycopg.connect()
+        engine = sqlalchemy.create_engine("postgresql+psycopg://", creator=lambda: dbh)
+
+        query = """
+            select time_bucket('5 minutes', time) as bucket,
+                   device,
+                   avg(cast(values->'rootdisp'->>'value' as float)) as rootdisp,
+                   avg(cast(values->'stratum'->>'value' as int)) as stratum
+            from measurements
+            where time >= date_trunc('day', now()) AND time < date_trunc('day', now()) + '1 day'::interval and
+                  application = 'TSM' and attributes->>'Label' = 'david'
+            group by bucket, device
+            order by bucket, device
+        """
+        df = pd.read_sql(query, con=engine)
+
+        fig = po.Figure()
+        fig.add_trace(po.Scatter(x=df['bucket'], y=df['rootdisp'], mode='lines', name='Root Dispersion', yaxis='y1', line=dict(color='red')))
+        fig.add_trace(po.Scatter(x=df['bucket'], y=df['stratum'], mode='lines', name='Stratum', yaxis='y2', line=dict(color='blue')))
+
+        fig.update_layout(
+            # Linke Y-Achse
+            yaxis=dict(
+                title='Root Dispersion'
+            ),
+            # Rechte Y-Achse
+            yaxis2=dict(
+                title='Stratum',
+                overlaying='y',     # Legt die zweite Y-Achse über die erste
+                side='right',       # Setzt sie auf die rechte Seite
+                tickmode='linear',  # Stellt sicher, dass die Ticks in festen Intervallen sind
+                dtick=1,            # Zeigt nur ganzzahlige Ticks
+            ),
+            legend=dict(x=0.05, y=1)  # Position der Legende
+        )
+
+        img = fig.to_image(format='png')
+        return Response(img, mimetype='image/png')
+    except Exception as e:
+        raise Exception(f"Error when querying NTP server values: {e}")
+    finally:
+        if dbh is not None:
+            dbh.close()
+
 if __name__ == '__main__':