Compare commits

...

52 Commits

SHA1 | Message | Checks (ci/woodpecker: push / tag) | Date
4db989022e | time reqs | passed / passed | 2025-02-13 10:59:17 +01:00
af76406afa | packets, 3 | failed / passed | 2025-02-12 19:01:58 +01:00
d85c32247e | packets, 2 | passed / passed | 2025-02-12 18:48:37 +01:00
ba7b86e527 | packets | passed / passed | 2025-02-12 18:42:27 +01:00
c93ae4067e | more graphs, 5 | passed / passed | 2025-02-12 13:38:26 +01:00
1bbfdf65fb | more graphs, 3 | passed / passed | 2025-02-12 13:28:29 +01:00
315ad9998b | more graphs, 2 | passed / passed | 2025-02-12 13:18:41 +01:00
047a3a6c08 | more graphs | passed / passed | 2025-02-12 13:14:01 +01:00
5573024fd9 | ticksuffix, 2 | failed / passed | 2025-02-12 12:54:33 +01:00
a58f914c40 | ticksuffix | passed / passed | 2025-02-12 12:45:52 +01:00
4ec9690981 | from ntpserver_added | passed / passed | 2025-02-12 12:38:41 +01:00
6af509c2f6 | drop kaleido | n/a | 2025-02-12 12:35:07 +01:00
22eba69526 | merged | failed / n/a | 2025-02-12 12:32:55 +01:00
a2855edd47 | use tag | passed / n/a | 2025-02-12 12:20:04 +01:00
a2f720855d | sbom scanning | passed / n/a | 2025-02-12 12:15:42 +01:00
cbb9ff7a23 | kaleido, 3 | failed / failed | 2025-02-11 22:13:57 +01:00
84b5bbe325 | python 3.11 | passed / passed | 2025-02-11 21:59:14 +01:00
a42254cf95 | import kaleido, 2 | passed / passed | 2025-02-11 21:21:14 +01:00
3d05ea5d28 | import kaleido | passed / passed | 2025-02-11 19:58:08 +01:00
52217c5251 | still kaleido | passed / passed | 2025-02-11 19:53:48 +01:00
022d7a8926 | merged | failed / n/a | 2025-02-11 19:52:27 +01:00
698f926376 | Merge branch 'main' of gitea.hottis.de:wn/pv-stats | n/a | 2025-02-11 19:52:10 +01:00
f74ad50b94 | kaleido | n/a | 2025-02-11 19:52:04 +01:00
0c03d9f94e | add kaleido, 3 | failed / failed | 2025-02-11 19:49:49 +01:00
eca5affd53 | add kaleido, 2 | failed / failed | 2025-02-11 19:48:09 +01:00
6236673d28 | add kaleido | failed / failed | 2025-02-11 19:47:07 +01:00
4213dc7329 | ntp as png | passed / passed | 2025-02-11 19:42:19 +01:00
2d3eab0db8 | disable trivy for the moment | passed / passed | 2025-02-11 16:59:58 +01:00
73b55b05c4 | ntp server numbers | failed / failed | 2025-02-11 16:52:51 +01:00
aa74c02498 | timing | passed / passed | 2025-01-31 10:29:42 +01:00
6fd2bd0863 | timing | passed / passed | 2025-01-31 10:19:03 +01:00
2c78fba3a6 | timing | passed / passed | 2025-01-30 17:37:55 +01:00
86b883569f | timing | failed / passed | 2025-01-30 17:24:18 +01:00
ba86a08632 | timing | passed / passed | 2025-01-30 17:16:08 +01:00
0b61a18eb1 | timing | passed / passed | 2025-01-30 17:10:01 +01:00
1418603007 | timing | passed / passed | 2025-01-30 16:59:36 +01:00
9926c89ef2 | timing | passed / passed | 2025-01-30 16:53:07 +01:00
fc6f407a52 | timing | passed / passed | 2025-01-30 16:42:09 +01:00
0e9cb0a7f8 | timing | passed / passed | 2025-01-30 14:16:01 +01:00
e3b2ea704d | timing | passed / passed | 2025-01-30 13:35:08 +01:00
8bd4a4b695 | timing | passed / passed | 2025-01-30 13:30:45 +01:00
89f3cbb5d1 | timing | passed / passed | 2025-01-30 10:31:36 +01:00
37c4a373b7 | timing | passed / passed | 2025-01-30 10:25:56 +01:00
408cff442c | timing | passed / passed | 2025-01-30 10:19:07 +01:00
67b88aa2a1 | timing | failed / passed | 2025-01-30 10:03:16 +01:00
7b0238b4a5 | use new decrypt approach | passed / passed | 2025-01-29 17:28:12 +01:00
16771227bb | adjust configuration | failed / passed | 2025-01-29 15:40:34 +01:00
aa97e3cdd3 | adjust configuration | passed / passed | 2025-01-29 15:27:35 +01:00
2c93c7ec47 | token debug | passed / passed | 2025-01-28 21:57:23 +01:00
e0b1c469d2 | token debug | failed / passed | 2025-01-28 21:54:13 +01:00
d646090802 | token debug | passed / passed | 2025-01-28 21:49:27 +01:00
c4fd8b2cfd | fix | passed / passed | 2025-01-28 14:51:56 +01:00
6 changed files with 217 additions and 24 deletions

View File

@@ -13,14 +13,44 @@ steps:
dockerfile: Dockerfile
when:
- event: [push, tag]
scan_image:
image: aquasec/trivy
scan:
image: quay.io/wollud1969/woodpecker-helper:0.5.1
environment:
TRIVY_TOKEN:
from_secret: trivy_token
TRIVY_URL:
from_secret: trivy_url
DTRACK_API_KEY:
from_secret: dtrack_api_key
DTRACK_API_URL:
from_secret: dtrack_api_url
commands:
- TRIVY_DISABLE_VEX_NOTICE=1 trivy image $FORGE_NAME/$CI_REPO:$CI_COMMIT_SHA --quiet --exit-code 1
- HOME=/home/`id -nu`
- TAG="${CI_COMMIT_TAG:-$CI_COMMIT_SHA}"
- |
trivy image \
--server $TRIVY_URL \
--token $TRIVY_TOKEN \
--format cyclonedx \
--scanners license \
--output /tmp/sbom.xml \
$FORGE_NAME/$CI_REPO:$TAG
- cat /tmp/sbom.xml
- |
curl -X "POST" \
-H "Content-Type: multipart/form-data" \
-H "X-Api-Key: $DTRACK_API_KEY" \
-F "autoCreate=true" \
-F "projectName=$CI_REPO" \
-F "projectVersion=$TAG" \
-F "bom=@/tmp/sbom.xml"\
"$DTRACK_API_URL/api/v1/bom"
when:
- event: [push, tag]
deploy:
image: quay.io/wollud1969/k8s-admin-helper:0.1.3
image: quay.io/wollud1969/k8s-admin-helper:0.2.1
environment:
KUBE_CONFIG_CONTENT:
from_secret: kube_config
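
The reworked scan step above generates a CycloneDX SBOM with trivy in client/server mode and then posts it to Dependency-Track's BOM endpoint ($DTRACK_API_URL/api/v1/bom) via curl. As a minimal sketch of the same upload in Python, assuming the requests library, the same DTRACK_API_URL and DTRACK_API_KEY environment variables, and an SBOM already written to /tmp/sbom.xml (the function name upload_sbom and the example arguments are illustrative only):

import os
import requests

def upload_sbom(sbom_path, project_name, project_version):
    # POST the SBOM as multipart/form-data, mirroring the curl call in the pipeline
    api_url = os.environ["DTRACK_API_URL"]
    api_key = os.environ["DTRACK_API_KEY"]
    with open(sbom_path, "rb") as bom:
        response = requests.post(
            f"{api_url}/api/v1/bom",
            headers={"X-Api-Key": api_key},
            data={
                "autoCreate": "true",          # create the project if it does not exist yet
                "projectName": project_name,
                "projectVersion": project_version,
            },
            files={"bom": bom},                # equivalent of curl -F "bom=@/tmp/sbom.xml"
        )
    response.raise_for_status()

# Example call (illustrative values; in the pipeline the version would be $TAG):
# upload_sbom("/tmp/sbom.xml", "wn/pv-stats", "1.2.3")
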

View File

@@ -1,4 +1,4 @@
FROM python:3.12-alpine3.21
FROM python:3.11-alpine3.21
ENV REDIS_URL=""
ENV SECRET_KEY=""

View File

@@ -4,6 +4,8 @@ metadata:
name: pv-stats
labels:
app: pv-stats
annotations:
secret.reloader.stakater.com/reload: pv-stats
spec:
replicas: 1
selector:

View File

@@ -19,10 +19,11 @@ kubectl create namespace $NAMESPACE \
-o yaml | \
kubectl -f - apply
SECRETS_FILE=`mktemp`
gpg --decrypt --passphrase $GPG_PASSPHRASE --yes --batch --homedir /tmp/.gnupg --output $SECRETS_FILE secrets.asc
. $SECRETS_FILE
rm $SECRETS_FILE
# SECRETS_FILE=`mktemp`
# gpg --decrypt --passphrase $GPG_PASSPHRASE --yes --batch --homedir /tmp/.gnupg --output $SECRETS_FILE secrets.asc
# . $SECRETS_FILE
# rm $SECRETS_FILE
eval "`cat secrets.asc | /usr/local/bin/decrypt-secrets.sh`"
kubectl create secret generic pv-stats \
--dry-run=client \

View File

@@ -1,15 +1,15 @@
-----BEGIN PGP MESSAGE-----
jA0ECQMCjSYtdE7M+rT/0sEmAbK6tjOB7cduweJyS3sULIl017BrOM1HGV1v4QkN
NWtceKHpJEFmk1ZFh1EmyS724bDxtVLm8qJN4VzV+cLPa8/IiTzOHJEg4PDRcc0U
LRCZOb5PtfKWBcQdRshT3JhIxxOKG53ZKOHJ2VodZ/iqmpSbjZ39GtzLJoXlXwCJ
jHAHgADdvq5J7joSaozjxVCXQI2nOmlEcZosZVd6LZDvemWtmci4ARJtOEqad4Wi
mbleupBF18pA7b63hfmHocO89b9pTpUZWw10b0SHrUiKZG6kHeQXE8GGu+4HiRcG
xJN/yYe2Ly4tHwT54gQ7ytLrxOnJUQKelSpq370t/lrGTf8L4b2eq+tED8/7z5Ho
E12VDreYLFaS2IeJCFBefiGGbKgttPcHZvDCPCUpCpfSShlqtTaidwsTq2lPFGyo
0Lz9NHsqp4/U+4nabCWNVWhnZhwqIPYOt/kKsYvjq40U29q7RORnxz8l/Ym4vB1P
kLeouElJvAz6vt/1+nFQJb65YykTqKQpbKgbziFiUFwb37QzBqRO7tuv3/MUzJ1Q
KB7+IOQ43vFbAK6DZAhJZtAspVYiZ95niVxepaghzf33/FquhtZ+XpFEBDv525z7
pML7BxFxuwF5q7HBTPtc4IeJBgGi16wm
=K6Qq
jA0ECQMC4tvOFCp2PT7/0sE+AcZmiGwroHYdWW/vJA1sCoMrM2oW8xUc1KndDGto
dFev2KcoZ4FRL9liCrJ7on773bZFCTDu2xiBNMeKF1p8drub1Ej5l61Oq15mLTYf
XjXknFiWWq4PTzhhy53zvDyIV5yIcGfjotpGC83/qH5CBWIcCw9PZHI5+uHRVRKL
OpdTpekJ8ljPAGQ4F3vbOeBbG1PMoclx8r5SpKxLwQco2iaXd71dXHtPkEnLujd4
ZRCthVNVfxrIIRcTJfgxewz2oJWYF9008nmtxpKzqfBtNpFIdBtnTTVvv2lfcVuS
S4eav9ljPPd/exZaT7fOGD/kuCdiiu6e0yGLzo2ykf0uBy7hG7ZJg8TH5e+LMBm9
Q1OFD+5LWeEsOEdSTlT5UbV5EaVEiawKWQn7rMZOyvBNiPwLaOlquHirWoll5eIm
OHgBVN0FiY/righpIoei/KP705FIg+hrpMUvc29PahiL8dgsxJycnKIo4t+2/nac
H144hP/rqBeaobG05TkZIr+Cpt4rpwwfNWOHgmNFHVpxdmPfoeAmpT4nz760hTEN
2ol4Qh8xL7n4GFiCpNg8tNZiZkPPa/aRUAFxgdq5beossvKukxCexQmlCFvxITSG
x9RwssFMnT+wqTuzBN8neBeEF1d9AtAFQKPtg3wkMUyJAlYkxiGS/2NJuYDVpmEQ
=Wr4f
-----END PGP MESSAGE-----

View File

@@ -7,9 +7,11 @@ import redis
import json
import os
import plotly.express as px
import plotly.graph_objects as po
import pandas as pd
import psycopg
import sqlalchemy
import time
try:
redis_url = os.environ['REDIS_URL']
@@ -34,17 +36,34 @@ app.config.update({
Session(app)
oidc = OpenIDConnect(app)
@app.route('/token_debug', methods=['GET'])
@oidc.require_login
def token_debug():
# retrieve the access token from the identity provider
access_token = oidc.get_access_token()
return json.dumps({
"access_token": access_token
})
@app.route('/')
@oidc.require_login
def index():
try:
stepX_time = time.time()
dbh = psycopg.connect()
engine = sqlalchemy.create_engine("postgresql+psycopg://", creator=lambda: dbh)
step0_time = time.time()
df = pd.read_sql("SELECT month, cast(year AS varchar), current_energy AS value FROM pv_energy_by_month", con=engine)
step1_time = time.time()
duration1 = step1_time - step0_time
logger.info(f"{duration1=}")
fig_1 = px.bar(df, x='month', y='value', color='year', barmode='group')
step2_time = time.time()
duration2 = step2_time - step1_time
logger.info(f"{duration2=}")
fig_1.update_layout(
title="Jahreswerte Exportierte Energie",
title=f"Jahreswerte Exportierte Energie {duration1:.3f}, {duration2:.3f}",
xaxis_title="",
yaxis_title="",
legend_title="Jahr",
@@ -57,25 +76,42 @@
)
graph_html_1 = fig_1.to_html(full_html=False, default_height='30%')
step3_time = time.time()
df = pd.read_sql("SELECT time_bucket('5 minutes', time) AS bucket, AVG(power) AS avg_power FROM pv_power_v WHERE time >= date_trunc('day', now()) - '1 day'::interval AND time < date_trunc('day', now()) GROUP BY bucket ORDER BY bucket", con=engine)
step4_time = time.time()
duration3 = step4_time - step3_time
logger.info(f"{duration3=}")
fig_2 = px.line(df, x='bucket', y='avg_power')
step5_time = time.time()
duration4 = step5_time - step4_time
logger.info(f"{duration4=}")
fig_2.update_layout(
xaxis_title="",
yaxis_title="",
title="Export gestern",
title=f"Export gestern {duration3:.3f}, {duration4:.3f}",
yaxis=dict(ticksuffix=" W")
)
graph_html_2 = fig_2.to_html(full_html=False, default_height='30%')
step6_time = time.time()
df = pd.read_sql("SELECT time_bucket('5 minutes', time) AS bucket, AVG(power) AS avg_power FROM pv_power_v WHERE time >= date_trunc('day', now()) AND time < date_trunc('day', now()) + '1 day'::interval GROUP BY bucket ORDER BY bucket", con=engine)
step7_time = time.time()
duration5 = step7_time - step6_time
logger.info(f"{duration5=}")
fig_3 = px.line(df, x='bucket', y='avg_power')
step8_time = time.time()
duration6 = step8_time - step7_time
logger.info(f"{duration6=}")
fig_3.update_layout(
xaxis_title="",
yaxis_title="",
title="Export heute",
title=f"Export heute {duration5:.3f}, {duration6:.3f}",
yaxis=dict(ticksuffix=" W")
)
graph_html_3 = fig_3.to_html(full_html=False, default_height='30%')
stepZ_time = time.time()
duration7 = stepZ_time - stepX_time
logger.info(f"{duration7=}")
return render_template_string(f"""
<html>
@@ -86,6 +122,23 @@ def index():
{graph_html_1}
{graph_html_2}
{graph_html_3}
<div style="height:9vh; background-color:lightgrey; font-family: Courier, Consolas, monospace;">
<table style="border-collapse: collapse;">
<style>
td.smallsep {{ padding-right: 10px }}
td.largesep {{ padding-right: 30px }}
</style>
<tr>
<td class="smallsep">Query 1:</td><td class="largesep"> {duration1:.3f} s</td><td class="smallsep">Graph 1:</td><td> {duration2:.3f} s</td>
</tr><tr>
<td class="smallsep">Query 2:</td><td class="largesep"> {duration3:.3f} s</td><td class="smallsep">Graph 2:</td><td> {duration4:.3f} s</td>
</tr><tr>
<td class="smallsep">Query 3:</td><td class="largesep"> {duration5:.3f} s</td><td class="smallsep">Graph 3:</td><td> {duration6:.3f} s</td>
</tr><tr>
<td class="smallsep">Total:</td><td> {duration7:.3f} s</td><td></td><td></td>
</tr>
</table>
</div>
</body>
</html>
""")
@@ -96,6 +149,113 @@ def index():
dbh.close()
@app.route('/ntpserver')
def ntpserver():
try:
dbh = psycopg.connect()
engine = sqlalchemy.create_engine("postgresql+psycopg://", creator=lambda: dbh)
query = """
select time_bucket('5 minutes', time) as bucket,
device,
avg(cast(values->'rootdisp'->>'value' as float)) as rootdisp,
max(cast(values->'stratum'->>'value' as int)) as stratum
from measurements
where time >= date_trunc('day', now()) AND time < date_trunc('day', now()) + '1 day'::interval and
application = 'TSM' and attributes->>'Label' = 'david'
group by bucket, device
order by bucket, device
"""
df = pd.read_sql(query, con=engine)
fig = po.Figure()
fig.add_trace(po.Scatter(x=df['bucket'], y=df['rootdisp'], mode='lines', name='Root Dispersion', yaxis='y1', line=dict(color='red')))
fig.add_trace(po.Scatter(x=df['bucket'], y=df['stratum'], mode='lines', name='Stratum', yaxis='y2', line=dict(color='blue')))
fig.update_layout(
title='NTP Server Numbers',
# left y-axis
yaxis=dict(
title='Root Dispersion',
ticksuffix=' ms'
),
# right y-axis
yaxis2=dict(
title='Stratum',
overlaying='y', # overlay the second y-axis on top of the first
side='right', # place it on the right-hand side
tickmode='linear', # make sure ticks appear at fixed intervals
dtick=1, # show integer ticks only
),
legend=dict(x=0.05, y=1) # legend position
)
graph_html_1 = fig.to_html(full_html=False, default_height='30%')
query = """
select time_bucket('5 minutes', time) as bucket,
device,
avg(cast(values->'time-reqs-pkts'->>'value' as float)) as packets
from measurements
where time >= date_trunc('day', now()) AND time < date_trunc('day', now()) + '1 day'::interval and
application = 'SNMP' and attributes->>'Label' = 'david'
group by bucket, device
order by bucket, device
"""
df = pd.read_sql(query, con=engine)
fig_2 = px.line(df, x='bucket', y='packets')
fig_2.update_layout(
xaxis_title="",
yaxis_title="",
yaxis_ticksuffix="p/s",
title=f"Time Requests"
)
graph_html_2 = fig_2.to_html(full_html=False, default_height='30%')
query = """
select time_bucket('5 minutes', time) as bucket,
device,
avg(cast(values->'load1'->>'value' as float)) as loadaverage1min
from measurements
where time >= date_trunc('day', now()) AND time < date_trunc('day', now()) + '1 day'::interval and
application = 'SNMP' and attributes->>'Label' = 'david'
group by bucket, device
order by bucket, device
"""
df = pd.read_sql(query, con=engine)
fig_3 = px.line(df, x='bucket', y='loadaverage1min')
fig_3.update_layout(
xaxis_title="",
yaxis_title="",
title=f"CPU Load"
)
graph_html_3 = fig_3.to_html(full_html=False, default_height='30%')
return render_template_string(f"""
<html>
<head>
<title>NTP Server Numbers</title>
</head>
<body>
{graph_html_1}
{graph_html_2}
{graph_html_3}
</body>
</html>
""")
except Exception as e:
raise Exception(f"Error when querying NTP server values: {e}")
finally:
if dbh is not None:
dbh.close()
if __name__ == '__main__':
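
The ntpserver() route above draws two traces into one figure: root dispersion on the left y-axis and stratum on a second y-axis overlaid on the right. Stripped of the database access, the two-axis Plotly pattern looks like the following self-contained sketch (the data points are dummy values invented for illustration):

import plotly.graph_objects as po

fig = po.Figure()
fig.add_trace(po.Scatter(x=[0, 1, 2], y=[1.2, 0.8, 1.5],
                         mode='lines', name='Root Dispersion'))        # default left y-axis
fig.add_trace(po.Scatter(x=[0, 1, 2], y=[2, 2, 1],
                         mode='lines', name='Stratum', yaxis='y2'))    # second y-axis
fig.update_layout(
    yaxis=dict(title='Root Dispersion', ticksuffix=' ms'),             # left axis
    yaxis2=dict(title='Stratum', overlaying='y', side='right',         # right axis, same x-axis
                tickmode='linear', dtick=1),
    legend=dict(x=0.05, y=1),
)
graph_html = fig.to_html(full_html=False, default_height='30%')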