Compare commits

...

28 Commits

SHA1        Message                        Date                         push pipeline  tag pipeline
(pipelines: ci/woodpecker/push/woodpecker and ci/woodpecker/tag/woodpecker)

4213dc7329  ntp as png                     2025-02-11 19:42:19 +01:00   success        success
2d3eab0db8  disable trivy for the moment   2025-02-11 16:59:58 +01:00   success        success
73b55b05c4  ntp server numbers             2025-02-11 16:52:51 +01:00   failed         failed
aa74c02498  timing                         2025-01-31 10:29:42 +01:00   success        success
6fd2bd0863  timing                         2025-01-31 10:19:03 +01:00   success        success
2c78fba3a6  timing                         2025-01-30 17:37:55 +01:00   success        success
86b883569f  timing                         2025-01-30 17:24:18 +01:00   failed         success
ba86a08632  timing                         2025-01-30 17:16:08 +01:00   success        success
0b61a18eb1  timing                         2025-01-30 17:10:01 +01:00   success        success
1418603007  timing                         2025-01-30 16:59:36 +01:00   success        success
9926c89ef2  timing                         2025-01-30 16:53:07 +01:00   success        success
fc6f407a52  timing                         2025-01-30 16:42:09 +01:00   success        success
0e9cb0a7f8  timing                         2025-01-30 14:16:01 +01:00   success        success
e3b2ea704d  timing                         2025-01-30 13:35:08 +01:00   success        success
8bd4a4b695  timing                         2025-01-30 13:30:45 +01:00   success        success
89f3cbb5d1  timing                         2025-01-30 10:31:36 +01:00   success        success
37c4a373b7  timing                         2025-01-30 10:25:56 +01:00   success        success
408cff442c  timing                         2025-01-30 10:19:07 +01:00   success        success
67b88aa2a1  timing                         2025-01-30 10:03:16 +01:00   failed         success
7b0238b4a5  use new decrypt approach       2025-01-29 17:28:12 +01:00   success        success
16771227bb  adjust configuration           2025-01-29 15:40:34 +01:00   failed         success
aa97e3cdd3  adjust configuration           2025-01-29 15:27:35 +01:00   success        success
2c93c7ec47  token debug                    2025-01-28 21:57:23 +01:00   success        success
e0b1c469d2  token debug                    2025-01-28 21:54:13 +01:00   failed         success
d646090802  token debug                    2025-01-28 21:49:27 +01:00   success        success
c4fd8b2cfd  fix                            2025-01-28 14:51:56 +01:00   success        success
37ce3d47ca  fix                            2025-01-28 14:41:18 +01:00   success        success
988f24994b  fix                            2025-01-28 14:28:24 +01:00   success        success
5 changed files with 147 additions and 29 deletions

File 1 of 5: Woodpecker CI pipeline configuration

@@ -13,14 +13,8 @@ steps:
       dockerfile: Dockerfile
     when:
       - event: [push, tag]
-  scan_image:
-    image: aquasec/trivy
-    commands:
-      - TRIVY_DISABLE_VEX_NOTICE=1 trivy image $FORGE_NAME/$CI_REPO:$CI_COMMIT_SHA --quiet --exit-code 1
-    when:
-      - event: [push, tag]
   deploy:
-    image: quay.io/wollud1969/k8s-admin-helper:0.1.3
+    image: quay.io/wollud1969/k8s-admin-helper:0.2.1
     environment:
       KUBE_CONFIG_CONTENT:
         from_secret: kube_config

File 2 of 5: Kubernetes Deployment manifest for pv-stats

@@ -4,6 +4,8 @@ metadata:
   name: pv-stats
   labels:
     app: pv-stats
+  annotations:
+    secret.reloader.stakater.com/reload: pv-stats
 spec:
   replicas: 1
   selector:
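The secret.reloader.stakater.com/reload annotation tells the Stakater Reloader operator (assuming it is running in the cluster) to roll the pv-stats Deployment whenever the referenced pv-stats Secret changes, so newly deployed secrets take effect without a manual restart.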

File 3 of 5: deployment shell script (namespace and secret setup)

@@ -19,10 +19,11 @@ kubectl create namespace $NAMESPACE \
   -o yaml | \
   kubectl -f - apply

-SECRETS_FILE=`mktemp`
-gpg --decrypt --passphrase $GPG_PASSPHRASE --yes --batch --homedir /tmp/.gnupg --output $SECRETS_FILE secrets.asc
-. $SECRETS_FILE
-rm $SECRETS_FILE
+# SECRETS_FILE=`mktemp`
+# gpg --decrypt --passphrase $GPG_PASSPHRASE --yes --batch --homedir /tmp/.gnupg --output $SECRETS_FILE secrets.asc
+# . $SECRETS_FILE
+# rm $SECRETS_FILE
+eval "`cat secrets.asc | /usr/local/bin/decrypt-secrets.sh`"

 kubectl create secret generic pv-stats \
   --dry-run=client \
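This is the "use new decrypt approach" change: instead of decrypting secrets.asc into a temporary file and sourcing it, the script pipes the armored file through /usr/local/bin/decrypt-secrets.sh and evals the output, which is expected to consist of shell variable assignments; the old temp-file variant is kept, commented out, for reference.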

File 4 of 5: ASCII-armored PGP message with the encrypted secrets

@@ -1,15 +1,15 @@
 -----BEGIN PGP MESSAGE-----

-jA0ECQMCjSYtdE7M+rT/0sEmAbK6tjOB7cduweJyS3sULIl017BrOM1HGV1v4QkN
-NWtceKHpJEFmk1ZFh1EmyS724bDxtVLm8qJN4VzV+cLPa8/IiTzOHJEg4PDRcc0U
-LRCZOb5PtfKWBcQdRshT3JhIxxOKG53ZKOHJ2VodZ/iqmpSbjZ39GtzLJoXlXwCJ
-jHAHgADdvq5J7joSaozjxVCXQI2nOmlEcZosZVd6LZDvemWtmci4ARJtOEqad4Wi
-mbleupBF18pA7b63hfmHocO89b9pTpUZWw10b0SHrUiKZG6kHeQXE8GGu+4HiRcG
-xJN/yYe2Ly4tHwT54gQ7ytLrxOnJUQKelSpq370t/lrGTf8L4b2eq+tED8/7z5Ho
-E12VDreYLFaS2IeJCFBefiGGbKgttPcHZvDCPCUpCpfSShlqtTaidwsTq2lPFGyo
-0Lz9NHsqp4/U+4nabCWNVWhnZhwqIPYOt/kKsYvjq40U29q7RORnxz8l/Ym4vB1P
-kLeouElJvAz6vt/1+nFQJb65YykTqKQpbKgbziFiUFwb37QzBqRO7tuv3/MUzJ1Q
-KB7+IOQ43vFbAK6DZAhJZtAspVYiZ95niVxepaghzf33/FquhtZ+XpFEBDv525z7
-pML7BxFxuwF5q7HBTPtc4IeJBgGi16wm
-=K6Qq
+jA0ECQMC4tvOFCp2PT7/0sE+AcZmiGwroHYdWW/vJA1sCoMrM2oW8xUc1KndDGto
+dFev2KcoZ4FRL9liCrJ7on773bZFCTDu2xiBNMeKF1p8drub1Ej5l61Oq15mLTYf
+XjXknFiWWq4PTzhhy53zvDyIV5yIcGfjotpGC83/qH5CBWIcCw9PZHI5+uHRVRKL
+OpdTpekJ8ljPAGQ4F3vbOeBbG1PMoclx8r5SpKxLwQco2iaXd71dXHtPkEnLujd4
+ZRCthVNVfxrIIRcTJfgxewz2oJWYF9008nmtxpKzqfBtNpFIdBtnTTVvv2lfcVuS
+S4eav9ljPPd/exZaT7fOGD/kuCdiiu6e0yGLzo2ykf0uBy7hG7ZJg8TH5e+LMBm9
+Q1OFD+5LWeEsOEdSTlT5UbV5EaVEiawKWQn7rMZOyvBNiPwLaOlquHirWoll5eIm
+OHgBVN0FiY/righpIoei/KP705FIg+hrpMUvc29PahiL8dgsxJycnKIo4t+2/nac
+H144hP/rqBeaobG05TkZIr+Cpt4rpwwfNWOHgmNFHVpxdmPfoeAmpT4nz760hTEN
+2ol4Qh8xL7n4GFiCpNg8tNZiZkPPa/aRUAFxgdq5beossvKukxCexQmlCFvxITSG
+x9RwssFMnT+wqTuzBN8neBeEF1d9AtAFQKPtg3wkMUyJAlYkxiGS/2NJuYDVpmEQ
+=Wr4f
 -----END PGP MESSAGE-----

File 5 of 5: Flask application

@@ -1,4 +1,4 @@
-from flask import Flask, session, g, render_template_string
+from flask import Flask, session, g, render_template_string, Response
 from flask_session import Session
 from flask_oidc import OpenIDConnect
 from werkzeug.middleware.proxy_fix import ProxyFix
@@ -7,9 +7,11 @@ import redis
 import json
 import os
 import plotly.express as px
+import plotly.graph_objects as po
 import pandas as pd
 import psycopg
 import sqlalchemy
+import time

 try:
     redis_url = os.environ['REDIS_URL']
@@ -34,18 +36,35 @@ app.config.update({
 Session(app)
 oidc = OpenIDConnect(app)

+@app.route('/token_debug', methods=['GET'])
+@oidc.require_login
+def token_debug():
+    # fetch the access token from the identity provider
+    access_token = oidc.get_access_token()
+    return json.dumps({
+        "access_token": access_token
+    })
+
 @app.route('/')
 @oidc.require_login
 def index():
     try:
+        stepX_time = time.time()
         dbh = psycopg.connect()
         engine = sqlalchemy.create_engine("postgresql+psycopg://", creator=lambda: dbh)
+        step0_time = time.time()
         df = pd.read_sql("SELECT month, cast(year AS varchar), current_energy AS value FROM pv_energy_by_month", con=engine)
+        step1_time = time.time()
+        duration1 = step1_time - step0_time
+        logger.info(f"{duration1=}")
         fig_1 = px.bar(df, x='month', y='value', color='year', barmode='group')
+        step2_time = time.time()
+        duration2 = step2_time - step1_time
+        logger.info(f"{duration2=}")
         fig_1.update_layout(
-            title="Jahreswerte Exportierte Energie",
-            xaxis_title="Monat",
+            title=f"Jahreswerte Exportierte Energie {duration1:.3f}, {duration2:.3f}",
+            xaxis_title="",
             yaxis_title="",
             legend_title="Jahr",
             xaxis=dict(
@@ -55,14 +74,44 @@ def index():
             ),
             yaxis=dict(ticksuffix=" kWh")
         )
-        graph_html_1 = fig_1.to_html(full_html=False)
+        graph_html_1 = fig_1.to_html(full_html=False, default_height='30%')

+        step3_time = time.time()
         df = pd.read_sql("SELECT time_bucket('5 minutes', time) AS bucket, AVG(power) AS avg_power FROM pv_power_v WHERE time >= date_trunc('day', now()) - '1 day'::interval AND time < date_trunc('day', now()) GROUP BY bucket ORDER BY bucket", con=engine)
+        step4_time = time.time()
+        duration3 = step4_time - step3_time
+        logger.info(f"{duration3=}")
         fig_2 = px.line(df, x='bucket', y='avg_power')
+        step5_time = time.time()
+        duration4 = step5_time - step4_time
+        logger.info(f"{duration4=}")
         fig_2.update_layout(
-            title="Export gestern"
+            xaxis_title="",
+            yaxis_title="",
+            title=f"Export gestern {duration3:.3f}, {duration4:.3f}",
+            yaxis=dict(ticksuffix=" W")
         )
-        graph_html_2 = fig_2.to_html(full_html=False)
+        graph_html_2 = fig_2.to_html(full_html=False, default_height='30%')

+        step6_time = time.time()
+        df = pd.read_sql("SELECT time_bucket('5 minutes', time) AS bucket, AVG(power) AS avg_power FROM pv_power_v WHERE time >= date_trunc('day', now()) AND time < date_trunc('day', now()) + '1 day'::interval GROUP BY bucket ORDER BY bucket", con=engine)
+        step7_time = time.time()
+        duration5 = step7_time - step6_time
+        logger.info(f"{duration5=}")
+        fig_3 = px.line(df, x='bucket', y='avg_power')
+        step8_time = time.time()
+        duration6 = step8_time - step7_time
+        logger.info(f"{duration6=}")
+        fig_3.update_layout(
+            xaxis_title="",
+            yaxis_title="",
+            title=f"Export heute {duration5:.3f}, {duration6:.3f}",
+            yaxis=dict(ticksuffix=" W")
+        )
+        graph_html_3 = fig_3.to_html(full_html=False, default_height='30%')
+
+        stepZ_time = time.time()
+        duration7 = stepZ_time - stepX_time
+        logger.info(f"{duration7=}")

         return render_template_string(f"""
         <html>
@@ -72,6 +121,24 @@ def index():
         <body>
            {graph_html_1}
            {graph_html_2}
+           {graph_html_3}
+           <div style="height:9vh; background-color:lightgrey; font-family: Courier, Consolas, monospace;">
+             <table style="border-collapse: collapse;">
+               <style>
+                 td.smallsep {{ padding-right: 10px }}
+                 td.largesep {{ padding-right: 30px }}
+               </style>
+               <tr>
+                 <td class="smallsep">Query 1:</td><td class="largesep"> {duration1:.3f} s</td><td class="smallsep">Graph 1:</td><td> {duration2:.3f} s</td>
+               </tr><tr>
+                 <td class="smallsep">Query 2:</td><td class="largesep"> {duration3:.3f} s</td><td class="smallsep">Graph 2:</td><td> {duration4:.3f} s</td>
+               </tr><tr>
+                 <td class="smallsep">Query 3:</td><td class="largesep"> {duration5:.3f} s</td><td class="smallsep">Graph 3:</td><td> {duration6:.3f} s</td>
+               </tr><tr>
+                 <td class="smallsep">Total:</td><td> {duration7:.3f} s</td><td></td><td></td>
+               </tr>
+             </table>
+           </div>
         </body>
         </html>
         """)
@@ -82,6 +149,60 @@ def index():
             dbh.close()

+@app.route('/ntpserver')
+def ntpserver():
+    try:
+        dbh = psycopg.connect()
+        engine = sqlalchemy.create_engine("postgresql+psycopg://", creator=lambda: dbh)
+
+        query = """
+            select time_bucket('5 minutes', time) as bucket,
+                   device,
+                   avg(cast(values->'rootdisp'->>'value' as float)) as rootdisp,
+                   avg(cast(values->'stratum'->>'value' as int)) as stratum
+            from measurements
+            where time >= date_trunc('day', now()) AND time < date_trunc('day', now()) + '1 day'::interval and
+                  application = 'TSM' and attributes->>'Label' = 'david'
+            group by bucket, device
+            order by bucket, device
+        """
+        df = pd.read_sql(query, con=engine)
+
+        fig = po.Figure()
+        fig.add_trace(po.Scatter(x=df['bucket'], y=df['rootdisp'], mode='lines', name='Root Dispersion', yaxis='y1', line=dict(color='red')))
+        fig.add_trace(po.Scatter(x=df['bucket'], y=df['stratum'], mode='lines', name='Stratum', yaxis='y2', line=dict(color='blue')))
+
+        fig.update_layout(
+            # left y-axis
+            yaxis=dict(
+                title='Root Dispersion'
+            ),
+            # right y-axis
+            yaxis2=dict(
+                title='Stratum',
+                overlaying='y',     # lay the second y-axis over the first
+                side='right',       # place it on the right-hand side
+                tickmode='linear',  # ticks at fixed intervals
+                dtick=1,            # integer ticks only
+            ),
+            legend=dict(x=0.05, y=1)  # legend position
+        )
+
+        img = fig.to_image(format='png')
+        return Response(img, mimetype='image/png')
+    except Exception as e:
+        raise Exception(f"Error when querying NTP server values: {e}")
+    finally:
+        if dbh is not None:
+            dbh.close()
+
 if __name__ == '__main__':
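The new /ntpserver route returns the figure as a PNG instead of embedded HTML; fig.to_image(format='png') relies on the kaleido package being available in the runtime image. A stripped-down sketch of the same pattern, with a made-up /demo.png route and sample data purely for illustration:

from flask import Flask, Response
import plotly.graph_objects as po

app = Flask(__name__)

@app.route('/demo.png')
def demo_png():
    # build a figure and render it server-side to PNG bytes (needs kaleido installed)
    fig = po.Figure(po.Scatter(x=[1, 2, 3], y=[2, 1, 3], mode='lines'))
    img = fig.to_image(format='png')
    return Response(img, mimetype='image/png')

if __name__ == '__main__':
    app.run()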