Compare commits


80 Commits
0.0.6 ... main

SHA1 | Message | CI (push / tag) | Date
(CI column: woodpecker pipeline result for the push / tag event; "-" = no pipeline run for that event)

2d48e87893 | ntp graphs | ok / ok | 2025-03-13 10:50:11 +01:00
6c1a62e09d | nicer graph | ok / - | 2025-03-12 21:13:24 +01:00
a5d3b13629 | changes | ok / - | 2025-03-12 20:49:44 +01:00
83f71b3f81 | fix, 3 | ok / ok | 2025-03-12 16:22:07 +01:00
730168ab61 | fix, 2 | ok / ok | 2025-03-12 16:18:28 +01:00
8bef6d676c | fix | ok / ok | 2025-03-12 16:14:36 +01:00
813265f8ee | forgotten requirement, 2 | ok / ok | 2025-03-12 16:10:28 +01:00
b47070cfc2 | forgotten requirement | failed / failed | 2025-03-12 16:08:57 +01:00
92ef3e6a85 | more png | ok / ok | 2025-03-12 16:04:34 +01:00
a63776fb3f | deploy names changed | ok / ok | 2025-03-12 15:43:33 +01:00
e24a29e94f | fix | ok / ok | 2025-03-12 15:02:41 +01:00
b3c2c7794a | pillow | ok / ok | 2025-03-12 14:48:40 +01:00
7ff1b70098 | routes | ok / ok | 2025-03-12 13:23:24 +01:00
aa4c307048 | fix | ok / ok | 2025-03-12 13:07:05 +01:00
19672e6106 | fix deployment | ok / ok | 2025-03-12 12:57:45 +01:00
97e9d3e4e5 | load route files correctly | ok / ok | 2025-03-12 12:01:07 +01:00
0914a91fa0 | debug start script and separate routes into separate files | ok / - | 2025-03-12 11:57:37 +01:00
a972916704 | switch to python 3.12 | ok / ok | 2025-02-19 13:11:55 +01:00
1774bb11aa | change public name | failed / ok | 2025-02-13 11:37:13 +01:00
d19ac55dea | time reqs, 2 | ok / ok | 2025-02-13 11:09:18 +01:00
4db989022e | time reqs | ok / ok | 2025-02-13 10:59:17 +01:00
af76406afa | packets, 3 | failed / ok | 2025-02-12 19:01:58 +01:00
d85c32247e | packets, 2 | ok / ok | 2025-02-12 18:48:37 +01:00
ba7b86e527 | packets | ok / ok | 2025-02-12 18:42:27 +01:00
c93ae4067e | more graphs, 5 | ok / ok | 2025-02-12 13:38:26 +01:00
1bbfdf65fb | more graphs, 3 | ok / ok | 2025-02-12 13:28:29 +01:00
315ad9998b | more graphs, 2 | ok / ok | 2025-02-12 13:18:41 +01:00
047a3a6c08 | more graphs | ok / ok | 2025-02-12 13:14:01 +01:00
5573024fd9 | ticksuffix, 2 | failed / ok | 2025-02-12 12:54:33 +01:00
a58f914c40 | ticksuffix | ok / ok | 2025-02-12 12:45:52 +01:00
4ec9690981 | from ntpserver_added | ok / ok | 2025-02-12 12:38:41 +01:00
6af509c2f6 | drop kaleido | - / - | 2025-02-12 12:35:07 +01:00
22eba69526 | merged | failed / - | 2025-02-12 12:32:55 +01:00
a2855edd47 | use tag | ok / - | 2025-02-12 12:20:04 +01:00
a2f720855d | sbom scanning | ok / - | 2025-02-12 12:15:42 +01:00
cbb9ff7a23 | kaleido, 3 | failed / failed | 2025-02-11 22:13:57 +01:00
84b5bbe325 | python 3.11 | ok / ok | 2025-02-11 21:59:14 +01:00
a42254cf95 | import kaleido, 2 | ok / ok | 2025-02-11 21:21:14 +01:00
3d05ea5d28 | import kaleido | ok / ok | 2025-02-11 19:58:08 +01:00
52217c5251 | still kaleido | ok / ok | 2025-02-11 19:53:48 +01:00
022d7a8926 | merged | failed / - | 2025-02-11 19:52:27 +01:00
698f926376 | Merge branch 'main' of gitea.hottis.de:wn/pv-stats | - / - | 2025-02-11 19:52:10 +01:00
f74ad50b94 | kaleido | - / - | 2025-02-11 19:52:04 +01:00
0c03d9f94e | add kaleido, 3 | failed / failed | 2025-02-11 19:49:49 +01:00
eca5affd53 | add kaleido, 2 | failed / failed | 2025-02-11 19:48:09 +01:00
6236673d28 | add kaleido | failed / failed | 2025-02-11 19:47:07 +01:00
4213dc7329 | ntp as png | ok / ok | 2025-02-11 19:42:19 +01:00
2d3eab0db8 | disable trivy for the moment | ok / ok | 2025-02-11 16:59:58 +01:00
73b55b05c4 | ntp server numbers | failed / failed | 2025-02-11 16:52:51 +01:00
aa74c02498 | timing | ok / ok | 2025-01-31 10:29:42 +01:00
6fd2bd0863 | timing | ok / ok | 2025-01-31 10:19:03 +01:00
2c78fba3a6 | timing | ok / ok | 2025-01-30 17:37:55 +01:00
86b883569f | timing | failed / ok | 2025-01-30 17:24:18 +01:00
ba86a08632 | timing | ok / ok | 2025-01-30 17:16:08 +01:00
0b61a18eb1 | timing | ok / ok | 2025-01-30 17:10:01 +01:00
1418603007 | timing | ok / ok | 2025-01-30 16:59:36 +01:00
9926c89ef2 | timing | ok / ok | 2025-01-30 16:53:07 +01:00
fc6f407a52 | timing | ok / ok | 2025-01-30 16:42:09 +01:00
0e9cb0a7f8 | timing | ok / ok | 2025-01-30 14:16:01 +01:00
e3b2ea704d | timing | ok / ok | 2025-01-30 13:35:08 +01:00
8bd4a4b695 | timing | ok / ok | 2025-01-30 13:30:45 +01:00
89f3cbb5d1 | timing | ok / ok | 2025-01-30 10:31:36 +01:00
37c4a373b7 | timing | ok / ok | 2025-01-30 10:25:56 +01:00
408cff442c | timing | ok / ok | 2025-01-30 10:19:07 +01:00
67b88aa2a1 | timing | failed / ok | 2025-01-30 10:03:16 +01:00
7b0238b4a5 | use new decrypt approach | ok / ok | 2025-01-29 17:28:12 +01:00
16771227bb | adjust configuration | failed / ok | 2025-01-29 15:40:34 +01:00
aa97e3cdd3 | adjust configuration | ok / ok | 2025-01-29 15:27:35 +01:00
2c93c7ec47 | token debug | ok / ok | 2025-01-28 21:57:23 +01:00
e0b1c469d2 | token debug | failed / ok | 2025-01-28 21:54:13 +01:00
d646090802 | token debug | ok / ok | 2025-01-28 21:49:27 +01:00
c4fd8b2cfd | fix | ok / ok | 2025-01-28 14:51:56 +01:00
37ce3d47ca | fix | ok / ok | 2025-01-28 14:41:18 +01:00
988f24994b | fix | ok / ok | 2025-01-28 14:28:24 +01:00
b8dd70e5ae | fix | ok / ok | 2025-01-28 14:20:09 +01:00
6ba9352edc | second graph | ok / ok | 2025-01-28 14:13:30 +01:00
aaa23a9839 | only one graph | ok / ok | 2025-01-28 13:54:11 +01:00
812989df47 | two graphs | ok / ok | 2025-01-28 13:49:26 +01:00
452161ff03 | disable trivy warning | ok / - | 2025-01-28 13:47:24 +01:00
cd3bf25fa1 | two graphs | ok / ok | 2025-01-28 13:44:03 +01:00
13 changed files with 435 additions and 115 deletions


@@ -13,14 +13,44 @@ steps:
       dockerfile: Dockerfile
     when:
       - event: [push, tag]
-  scan_image:
-    image: aquasec/trivy
+  scan:
+    image: quay.io/wollud1969/woodpecker-helper:0.5.1
+    environment:
+      TRIVY_TOKEN:
+        from_secret: trivy_token
+      TRIVY_URL:
+        from_secret: trivy_url
+      DTRACK_API_KEY:
+        from_secret: dtrack_api_key
+      DTRACK_API_URL:
+        from_secret: dtrack_api_url
     commands:
-      - trivy image $FORGE_NAME/$CI_REPO:$CI_COMMIT_SHA --quiet --exit-code 1
+      - HOME=/home/`id -nu`
+      - TAG="${CI_COMMIT_TAG:-$CI_COMMIT_SHA}"
+      - |
+        trivy image \
+          --server $TRIVY_URL \
+          --token $TRIVY_TOKEN \
+          --format cyclonedx \
+          --scanners license \
+          --output /tmp/sbom.xml \
+          $FORGE_NAME/$CI_REPO:$TAG
+      - cat /tmp/sbom.xml
+      - |
+        curl -X "POST" \
+          -H "Content-Type: multipart/form-data" \
+          -H "X-Api-Key: $DTRACK_API_KEY" \
+          -F "autoCreate=true" \
+          -F "projectName=$CI_REPO" \
+          -F "projectVersion=$TAG" \
+          -F "bom=@/tmp/sbom.xml"\
+          "$DTRACK_API_URL/api/v1/bom"
     when:
       - event: [push, tag]
   deploy:
-    image: quay.io/wollud1969/k8s-admin-helper:0.1.3
+    image: quay.io/wollud1969/k8s-admin-helper:0.2.1
     environment:
       KUBE_CONFIG_CONTENT:
         from_secret: kube_config

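For reference, a minimal Python sketch (not part of this change) of the same SBOM upload the new scan step performs with curl. It assumes DTRACK_API_URL and DTRACK_API_KEY are available in the environment, as in the pipeline; the project name and version in the commented call are illustrative only.

# Hypothetical equivalent of the curl call in the scan step above.
import os
import requests

def upload_sbom(sbom_path: str, project: str, version: str) -> None:
    api_url = os.environ["DTRACK_API_URL"]
    api_key = os.environ["DTRACK_API_KEY"]
    with open(sbom_path, "rb") as sbom:
        resp = requests.post(
            f"{api_url}/api/v1/bom",
            headers={"X-Api-Key": api_key},
            data={"autoCreate": "true", "projectName": project, "projectVersion": version},
            files={"bom": sbom},  # multipart/form-data, like the -F options in curl
        )
    resp.raise_for_status()

# upload_sbom("/tmp/sbom.xml", "wn/pv-stats", "0.0.7")  # illustrative values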
debug-build-run.sh (new executable file, 25 lines)

@@ -0,0 +1,25 @@
#!/bin/bash
set -x
IMAGE_NAME=numberimage
docker build --progress=plain -t $IMAGE_NAME .
. load-debug-env
docker run \
-it \
--rm \
-e "REDIS_URL=$REDIS_URL" \
-e "SECRET_KEY=$SECRET_KEY" \
-e "OIDC_CLIENT_SECRETS=$OIDC_CLIENT_SECRETS" \
-e "PGHOST=$PGHOST" \
-e "PGDATABASE=$PGDATABASE" \
-e "PGSSLMODE=$PGSSLMODE" \
-e "PGUSER=$PGUSER" \
-e "PGPASSWORD=$PGPASSWORD" \
-p 8080:8080 \
$IMAGE_NAME

@@ -1,25 +1,27 @@
 apiVersion: apps/v1
 kind: Deployment
 metadata:
-  name: pv-stats
+  name: numbers
   labels:
-    app: pv-stats
+    app: numbers
+  annotations:
+    secret.reloader.stakater.com/reload: numbers
 spec:
   replicas: 1
   selector:
     matchLabels:
-      app: pv-stats
+      app: numbers
   template:
     metadata:
       labels:
-        app: pv-stats
+        app: numbers
     spec:
       containers:
-      - name: pv-stats
+      - name: numbers
         image: %IMAGE%
         envFrom:
         - secretRef:
-            name: pv-stats
+            name: numbers
         ports:
         - containerPort: 8080
           protocol: TCP
@@ -27,11 +29,11 @@ spec:
 apiVersion: v1
 kind: Service
 metadata:
-  name: pv-stats
+  name: numbers
 spec:
   type: ClusterIP
   selector:
-    app: pv-stats
+    app: numbers
   ports:
   - name: http
     targetPort: 8080
@@ -40,23 +42,23 @@ spec:
 apiVersion: networking.k8s.io/v1
 kind: Ingress
 metadata:
-  name: pv-stats
+  name: numbers
   annotations:
     cert-manager.io/cluster-issuer: letsencrypt-production-http
 spec:
   tls:
   - hosts:
-    - pv-stats.hottis.de
+    - numbers.hottis.de
-    secretName: pv-stats-cert
+    secretName: numbers-cert
   rules:
-  - host: pv-stats.hottis.de
+  - host: numbers.hottis.de
     http:
       paths:
       - path: /
         pathType: Prefix
         backend:
           service:
-            name: pv-stats
+            name: numbers
             port:
               number: 80

@@ -9,7 +9,7 @@ if [ "$GPG_PASSPHRASE" == "" ]; then
   exit 1
 fi
 
-IMAGE_NAME=gitea.hottis.de/wn/pv-stats
+IMAGE_NAME=gitea.hottis.de/wn/numbers
 NAMESPACE=homea
 DEPLOYMENT_DIR=$PWD/deployment
@@ -19,12 +19,13 @@ kubectl create namespace $NAMESPACE \
   -o yaml | \
   kubectl -f - apply
 
-SECRETS_FILE=`mktemp`
-gpg --decrypt --passphrase $GPG_PASSPHRASE --yes --batch --homedir /tmp/.gnupg --output $SECRETS_FILE secrets.asc
-. $SECRETS_FILE
-rm $SECRETS_FILE
+# SECRETS_FILE=`mktemp`
+# gpg --decrypt --passphrase $GPG_PASSPHRASE --yes --batch --homedir /tmp/.gnupg --output $SECRETS_FILE secrets.asc
+# . $SECRETS_FILE
+# rm $SECRETS_FILE
+eval "`cat secrets.asc | /usr/local/bin/decrypt-secrets.sh`"
 
-kubectl create secret generic pv-stats \
+kubectl create secret generic numbers \
   --dry-run=client \
   -o yaml \
   --save-config \

@@ -1,15 +1,15 @@
 -----BEGIN PGP MESSAGE-----
 
-jA0ECQMCjSYtdE7M+rT/0sEmAbK6tjOB7cduweJyS3sULIl017BrOM1HGV1v4QkN
-NWtceKHpJEFmk1ZFh1EmyS724bDxtVLm8qJN4VzV+cLPa8/IiTzOHJEg4PDRcc0U
-LRCZOb5PtfKWBcQdRshT3JhIxxOKG53ZKOHJ2VodZ/iqmpSbjZ39GtzLJoXlXwCJ
-jHAHgADdvq5J7joSaozjxVCXQI2nOmlEcZosZVd6LZDvemWtmci4ARJtOEqad4Wi
-mbleupBF18pA7b63hfmHocO89b9pTpUZWw10b0SHrUiKZG6kHeQXE8GGu+4HiRcG
-xJN/yYe2Ly4tHwT54gQ7ytLrxOnJUQKelSpq370t/lrGTf8L4b2eq+tED8/7z5Ho
-E12VDreYLFaS2IeJCFBefiGGbKgttPcHZvDCPCUpCpfSShlqtTaidwsTq2lPFGyo
-0Lz9NHsqp4/U+4nabCWNVWhnZhwqIPYOt/kKsYvjq40U29q7RORnxz8l/Ym4vB1P
-kLeouElJvAz6vt/1+nFQJb65YykTqKQpbKgbziFiUFwb37QzBqRO7tuv3/MUzJ1Q
-KB7+IOQ43vFbAK6DZAhJZtAspVYiZ95niVxepaghzf33/FquhtZ+XpFEBDv525z7
-pML7BxFxuwF5q7HBTPtc4IeJBgGi16wm
-=K6Qq
+jA0ECQMC4tvOFCp2PT7/0sE+AcZmiGwroHYdWW/vJA1sCoMrM2oW8xUc1KndDGto
+dFev2KcoZ4FRL9liCrJ7on773bZFCTDu2xiBNMeKF1p8drub1Ej5l61Oq15mLTYf
+XjXknFiWWq4PTzhhy53zvDyIV5yIcGfjotpGC83/qH5CBWIcCw9PZHI5+uHRVRKL
+OpdTpekJ8ljPAGQ4F3vbOeBbG1PMoclx8r5SpKxLwQco2iaXd71dXHtPkEnLujd4
+ZRCthVNVfxrIIRcTJfgxewz2oJWYF9008nmtxpKzqfBtNpFIdBtnTTVvv2lfcVuS
+S4eav9ljPPd/exZaT7fOGD/kuCdiiu6e0yGLzo2ykf0uBy7hG7ZJg8TH5e+LMBm9
+Q1OFD+5LWeEsOEdSTlT5UbV5EaVEiawKWQn7rMZOyvBNiPwLaOlquHirWoll5eIm
+OHgBVN0FiY/righpIoei/KP705FIg+hrpMUvc29PahiL8dgsxJycnKIo4t+2/nac
+H144hP/rqBeaobG05TkZIr+Cpt4rpwwfNWOHgmNFHVpxdmPfoeAmpT4nz760hTEN
+2ol4Qh8xL7n4GFiCpNg8tNZiZkPPa/aRUAFxgdq5beossvKukxCexQmlCFvxITSG
+x9RwssFMnT+wqTuzBN8neBeEF1d9AtAFQKPtg3wkMUyJAlYkxiGS/2NJuYDVpmEQ
+=Wr4f
 -----END PGP MESSAGE-----

load-debug-env (new file, 15 lines)

@@ -0,0 +1,15 @@
SECRETS=`mktemp`
gpg --decrypt --passphrase $GPG_PASSPHRASE --yes --batch --output $SECRETS ./deployment/secrets.asc
. $SECRETS
rm $SECRETS
DB_NAMESPACE=database1
DB_DEPLOYNAME=database
REDIS_NAMESPACE=redis
REDIS_SERVICE_NAME=redis
PGHOST=`kubectl get services $DB_DEPLOYNAME -n $DB_NAMESPACE -o jsonpath="{.status.loadBalancer.ingress[0].ip}"`
REDISHOST=`kubectl get services $REDIS_SERVICE_NAME -n $REDIS_NAMESPACE -o jsonpath="{.status.loadBalancer.ingress[0].ip}"`
REDIS_URL=redis://$REDISHOST:6379/4

src/app.py (new file, 32 lines)

@@ -0,0 +1,32 @@
from flask import Flask, session, g, render_template_string
from flask_session import Session
from flask_oidc import OpenIDConnect
from werkzeug.middleware.proxy_fix import ProxyFix
from loguru import logger
import redis
import json
import os
try:
    redis_url = os.environ['REDIS_URL']
    oidc_client_secrets = os.environ['OIDC_CLIENT_SECRETS']
    secret_key = os.environ['SECRET_KEY']
except KeyError as e:
    logger.error(f"Required environment variable not set ({e})")
    raise e

app = Flask(__name__)

app.config.update({
    'SECRET_KEY': secret_key,
    'SESSION_TYPE': 'redis',
    'SESSION_REDIS': redis.from_url(redis_url),
    'OIDC_CLIENT_SECRETS': json.loads(oidc_client_secrets),
    'OIDC_SCOPES': 'openid email',
    'OIDC_USER_INFO_ENABLED': True,
    'SESSION_USE_SIGNER': True,
})
Session(app)
oidc = OpenIDConnect(app)
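OIDC_CLIENT_SECRETS is parsed with json.loads, so it has to hold a complete client_secrets document. A hedged sketch of the expected shape, assuming the usual flask-oidc client_secrets layout; every value below is a placeholder, the real document comes from the Kubernetes secret or load-debug-env.

# Illustrative only; field names follow the common flask-oidc client_secrets format.
import json, os

os.environ["OIDC_CLIENT_SECRETS"] = json.dumps({
    "web": {
        "client_id": "numbers",                           # placeholder
        "client_secret": "change-me",                     # placeholder
        "issuer": "https://idp.example.org/realms/main",  # placeholder
        "auth_uri": "https://idp.example.org/realms/main/protocol/openid-connect/auth",
        "token_uri": "https://idp.example.org/realms/main/protocol/openid-connect/token",
        "userinfo_uri": "https://idp.example.org/realms/main/protocol/openid-connect/userinfo",
        "redirect_uris": ["https://numbers.hottis.de/oidc_callback"],  # placeholder
    }
})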

src/debug_routes.py (new file, 16 lines)

@@ -0,0 +1,16 @@
from loguru import logger
import json
from app import app
from app import oidc
@app.route('/token_debug', methods=['GET'])
@oidc.require_login
def token_debug():
    # fetch the access token from the identity provider
    access_token = oidc.get_access_token()
    return json.dumps({
        "access_token": access_token
    })

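Because of @oidc.require_login, an unauthenticated request to /token_debug should not return a token but be sent into the login flow. A small sketch using Flask's test client; it assumes the app can be imported, i.e. the REDIS_URL, SECRET_KEY and OIDC_CLIENT_SECRETS variables from src/app.py are set.

# Hypothetical smoke test, not part of the repository.
from app import app
import debug_routes  # noqa: F401  (registers the /token_debug route)

with app.test_client() as client:
    resp = client.get("/token_debug")
    # flask-oidc redirects unauthenticated users, typically with a 302
    assert resp.status_code in (301, 302, 308)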
src/ntp_routes.py (new file, 129 lines)

@@ -0,0 +1,129 @@
from flask import Flask, session, g, render_template_string, Response
from loguru import logger
import json
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
from matplotlib.ticker import ScalarFormatter
import pandas as pd
import psycopg
import sqlalchemy
import time
import io
from app import app
from app import oidc
@app.route('/ntp/stratum-rootdisp.png')
def stratum_rootdisp_png():
    dbh = psycopg.connect()
    engine = sqlalchemy.create_engine("postgresql+psycopg://", creator=lambda: dbh)

    query = """
        select time_bucket('5 minutes', time) as bucket,
               attributes->>'Label' as device,
               avg(cast(values->'rootdisp'->>'value' as float)) as rootdisp,
               max(cast(values->'stratum'->>'value' as int)) as stratum
        from measurements
        where time >= date_trunc('day', now()) AND time < date_trunc('day', now()) + '1 day'::interval and
              application = 'SNMP' and attributes->>'Label' IN ('harrison', 'david')
        group by bucket, attributes->>'Label'
        order by bucket, attributes->>'Label'
    """
    df = pd.read_sql(query, con=engine)
    df['rootdisp'] = df['rootdisp'] / 1e6

    # Extract date for title
    plot_date = df['bucket'].dt.date.iloc[0] if not df.empty else "Unknown Date"

    # Create figure with two side-by-side subplots
    fig, axes = plt.subplots(1, 2, figsize=(15, 5), sharex=True)

    for i, device in enumerate(['harrison', 'david']):
        ax1 = axes[i]
        ax2 = ax1.twinx()
        device_df = df[df['device'] == device]

        ax1.plot(device_df['bucket'], device_df['rootdisp'], 'r-', label='Root Dispersion')
        ax1.set_xlabel('Time')
        ax1.set_ylabel('Root Dispersion (ms)', color='r')
        ax1.tick_params(axis='y', labelcolor='r')

        ax2.plot(device_df['bucket'], device_df['stratum'], 'b-', label='Stratum')
        ax2.set_ylabel('Stratum', color='b')
        ax2.tick_params(axis='y', labelcolor='b')
        ax2.set_yticks(range(int(device_df['stratum'].min()), int(device_df['stratum'].max()) + 1))

        ax1.set_title(f'{device.capitalize()}')
        ax1.xaxis.set_major_formatter(mdates.DateFormatter('%H:%M'))

    fig.autofmt_xdate(rotation=45)
    fig.suptitle(f'Stratum and Root Dispersion - {plot_date}')
    fig.tight_layout()

    img_io = io.BytesIO()
    plt.savefig(img_io, format='png')
    img_io.seek(0)
    plt.close(fig)

    return Response(img_io, mimetype='image/png')


@app.route('/ntp/packets-load.png')
def packets_load_png():
    dbh = psycopg.connect()
    engine = sqlalchemy.create_engine("postgresql+psycopg://", creator=lambda: dbh)

    query = """
        select time_bucket('5 minutes', time) as bucket,
               attributes->>'Label' as device,
               avg(cast(values->'load1'->>'value' as float)) as load,
               avg(cast(values->'processed-pkts'->>'value' as int)) as packets
        from measurements
        where time >= date_trunc('day', now()) AND time < date_trunc('day', now()) + '1 day'::interval and
              application = 'SNMP' and attributes->>'Label' IN ('harrison', 'david')
        group by bucket, attributes->>'Label'
        order by bucket, attributes->>'Label'
    """
    df = pd.read_sql(query, con=engine)

    # Extract date for title
    plot_date = df['bucket'].dt.date.iloc[0] if not df.empty else "Unknown Date"

    # Create figure with two side-by-side subplots
    fig, axes = plt.subplots(1, 2, figsize=(15, 5), sharex=True)

    for i, device in enumerate(['harrison', 'david']):
        ax1 = axes[i]
        ax2 = ax1.twinx()
        device_df = df[df['device'] == device]

        ax1.plot(device_df['bucket'], device_df['load'], 'r-', label='CPU Load')
        ax1.set_xlabel('Time')
        ax1.set_ylabel('Load', color='r')
        ax1.tick_params(axis='y', labelcolor='r')

        ax2.plot(device_df['bucket'], device_df['packets'], 'b-', label='Processed Packets')
        ax2.set_ylabel('Packets', color='b')
        ax2.tick_params(axis='y', labelcolor='b')

        ax1.set_title(f'{device.capitalize()}')
        ax1.xaxis.set_major_formatter(mdates.DateFormatter('%H:%M'))

    fig.autofmt_xdate(rotation=45)
    fig.suptitle(f'CPU Load and Processed Packets - {plot_date}')
    fig.tight_layout()

    img_io = io.BytesIO()
    plt.savefig(img_io, format='png')
    img_io.seek(0)
    plt.close(fig)

    return Response(img_io, mimetype='image/png')

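The two PNG routes are not wrapped in @oidc.require_login, and psycopg.connect() is called without arguments, so the database connection is driven entirely by the PG* environment variables that debug-build-run.sh passes in (PGHOST, PGDATABASE, PGUSER, PGPASSWORD, PGSSLMODE). A hedged sketch of exercising one of them through Flask's test client:

# Sketch only; assumes the PG*, REDIS_URL, SECRET_KEY and OIDC_CLIENT_SECRETS
# variables are set and the measurements table is reachable.
from app import app
import ntp_routes  # noqa: F401  (registers the /ntp/*.png routes)

with app.test_client() as client:
    resp = client.get("/ntp/stratum-rootdisp.png")
    assert resp.status_code == 200
    assert resp.mimetype == "image/png"
    with open("stratum-rootdisp.png", "wb") as f:
        f.write(resp.data)  # save the rendered graph for inspection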
src/pv_routes.py (new file, 117 lines)

@@ -0,0 +1,117 @@
from flask import Flask, session, g, render_template_string
from loguru import logger
import json
import plotly.express as px
import plotly.graph_objects as po
import pandas as pd
import psycopg
import sqlalchemy
import time
from app import app
from app import oidc
@app.route('/pvstats')
@oidc.require_login
def pvstats():
    try:
        stepX_time = time.time()

        dbh = psycopg.connect()
        engine = sqlalchemy.create_engine("postgresql+psycopg://", creator=lambda: dbh)

        step0_time = time.time()
        df = pd.read_sql("SELECT month, cast(year AS varchar), current_energy AS value FROM pv_energy_by_month", con=engine)
        step1_time = time.time()
        duration1 = step1_time - step0_time
        logger.info(f"{duration1=}")

        fig_1 = px.bar(df, x='month', y='value', color='year', barmode='group')
        step2_time = time.time()
        duration2 = step2_time - step1_time
        logger.info(f"{duration2=}")

        fig_1.update_layout(
            title=f"Jahreswerte Exportierte Energie {duration1:.3f}, {duration2:.3f}",
            xaxis_title="",
            yaxis_title="",
            legend_title="Jahr",
            xaxis=dict(
                tickmode="array",
                tickvals=list(range(1, 13)),  # months 1-12
                ticktext=["Jan", "Feb", "Mär", "Apr", "Mai", "Jun", "Jul", "Aug", "Sep", "Okt", "Nov", "Dez"]
            ),
            yaxis=dict(ticksuffix=" kWh")
        )
        graph_html_1 = fig_1.to_html(full_html=False, default_height='30%')

        step3_time = time.time()
        df = pd.read_sql("SELECT time_bucket('5 minutes', time) AS bucket, AVG(power) AS avg_power FROM pv_power_v WHERE time >= date_trunc('day', now()) - '1 day'::interval AND time < date_trunc('day', now()) GROUP BY bucket ORDER BY bucket", con=engine)
        step4_time = time.time()
        duration3 = step4_time - step3_time
        logger.info(f"{duration3=}")

        fig_2 = px.line(df, x='bucket', y='avg_power')
        step5_time = time.time()
        duration4 = step5_time - step4_time
        logger.info(f"{duration4=}")

        fig_2.update_layout(
            xaxis_title="",
            yaxis_title="",
            title=f"Export gestern {duration3:.3f}, {duration4:.3f}",
            yaxis=dict(ticksuffix=" W")
        )
        graph_html_2 = fig_2.to_html(full_html=False, default_height='30%')

        step6_time = time.time()
        df = pd.read_sql("SELECT time_bucket('5 minutes', time) AS bucket, AVG(power) AS avg_power FROM pv_power_v WHERE time >= date_trunc('day', now()) AND time < date_trunc('day', now()) + '1 day'::interval GROUP BY bucket ORDER BY bucket", con=engine)
        step7_time = time.time()
        duration5 = step7_time - step6_time
        logger.info(f"{duration5=}")

        fig_3 = px.line(df, x='bucket', y='avg_power')
        step8_time = time.time()
        duration6 = step8_time - step7_time
        logger.info(f"{duration6=}")

        fig_3.update_layout(
            xaxis_title="",
            yaxis_title="",
            title=f"Export heute {duration5:.3f}, {duration6:.3f}",
            yaxis=dict(ticksuffix=" W")
        )
        graph_html_3 = fig_3.to_html(full_html=False, default_height='30%')

        stepZ_time = time.time()
        duration7 = stepZ_time - stepX_time
        logger.info(f"{duration7=}")

        return render_template_string(f"""
            <html>
              <head>
                <title>Jahreswerte PV-Energie</title>
              </head>
              <body>
                {graph_html_1}
                {graph_html_2}
                {graph_html_3}
                <div style="height:9vh; background-color:lightgrey; font-family: Courier, Consolas, monospace;">
                  <table style="border-collapse: collapse;">
                    <style>
                      td.smallsep {{ padding-right: 10px }}
                      td.largesep {{ padding-right: 30px }}
                    </style>
                    <tr>
                      <td class="smallsep">Query 1:</td><td class="largesep"> {duration1:.3f} s</td><td class="smallsep">Graph 1:</td><td> {duration2:.3f} s</td>
                    </tr><tr>
                      <td class="smallsep">Query 2:</td><td class="largesep"> {duration3:.3f} s</td><td class="smallsep">Graph 2:</td><td> {duration4:.3f} s</td>
                    </tr><tr>
                      <td class="smallsep">Query 3:</td><td class="largesep"> {duration5:.3f} s</td><td class="smallsep">Graph 3:</td><td> {duration6:.3f} s</td>
                    </tr><tr>
                      <td class="smallsep">Total:</td><td> {duration7:.3f} s</td><td></td><td></td>
                    </tr>
                  </table>
                </div>
              </body>
            </html>
        """)
    except Exception as e:
        raise Exception(f"Error when querying energy export values: {e}")
    finally:
        if dbh is not None:
            dbh.close()

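The pvstats route measures every query and figure step with repeated time.time() pairs. A small, hypothetical helper (not part of the change) that expresses the same bookkeeping more compactly:

# Hypothetical refactoring sketch of the stepN_time / durationN pattern above.
import time
from contextlib import contextmanager

@contextmanager
def timed(durations: dict, key: str):
    start = time.time()
    try:
        yield
    finally:
        durations[key] = time.time() - start

durations = {}
with timed(durations, "query1"):
    time.sleep(0.05)  # stands in for pd.read_sql(...)
print(f"query1 took {durations['query1']:.3f} s")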
@@ -38,3 +38,6 @@ tzdata==2025.1
 urllib3==2.3.0
 Werkzeug==3.1.3
 zipp==3.21.0
+pillow==11.1.0
+matplotlib==3.10.1

src/routes.py (new file, 24 lines)

@@ -0,0 +1,24 @@
from flask import abort, Response
from PIL import Image, ImageDraw
import io
from app import app
from app import oidc
@app.route('/')
def index():
    abort(404)


@app.route('/generate_image')
def generate_image():
    img = Image.new('RGB', (200, 100), color=(255, 255, 255))
    draw = ImageDraw.Draw(img)
    draw.text((50, 40), "Hello, Flask!", fill=(0, 0, 0))  # black text

    img_io = io.BytesIO()
    img.save(img_io, 'PNG')
    img_io.seek(0)  # rewind the buffer
    return Response(img_io, mimetype='image/png')

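A short sketch (assuming the same environment variables as above) that fetches /generate_image with the test client and checks that Pillow can read the result back:

# Sketch only; verifies that the generated PNG round-trips through Pillow.
import io
from PIL import Image
from app import app
import routes  # noqa: F401  (registers / and /generate_image)

with app.test_client() as client:
    resp = client.get("/generate_image")
    assert resp.status_code == 200
    img = Image.open(io.BytesIO(resp.data))
    assert img.size == (200, 100)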
@@ -1,90 +1,16 @@
-from flask import Flask, session, g, render_template_string
-from flask_session import Session
-from flask_oidc import OpenIDConnect
 from werkzeug.middleware.proxy_fix import ProxyFix
 from loguru import logger
-import redis
-import json
-import os
-import plotly.express as px
-import pandas as pd
-import psycopg
-import sqlalchemy
-
-try:
-    redis_url = os.environ['REDIS_URL']
-    oidc_client_secrets = os.environ['OIDC_CLIENT_SECRETS']
-    secret_key = os.environ['SECRET_KEY']
-except KeyError as e:
-    logger.error(f"Required environment variable not set ({e})")
-    raise e
-
-app = Flask(__name__)
-app.config.update({
-    'SECRET_KEY': secret_key,
-    'SESSION_TYPE': 'redis',
-    'SESSION_REDIS': redis.from_url(redis_url),
-    'OIDC_CLIENT_SECRETS': json.loads(oidc_client_secrets),
-    'OIDC_SCOPES': 'openid email',
-    'OIDC_USER_INFO_ENABLED': True,
-    'SESSION_USE_SIGNER': True,
-})
-
-Session(app)
-oidc = OpenIDConnect(app)
-
-@app.route('/')
-@oidc.require_login
-def index():
-    counter = int(session.get('counter', '0'))
-    counter += 1
-    session['counter'] = f"{counter}"
-    return f"Hello, Flask! Called for the {counter}. time."
-
-@app.route('/plot')
-@oidc.require_login
-def plot():
-    try:
-        dbh = psycopg.connect()
-        engine = sqlalchemy.create_engine("postgresql+psycopg://", creator=lambda: dbh)
-        df = pd.read_sql("SELECT month, cast(year AS varchar), current_energy AS value FROM pv_energy_by_month", con=engine)
-        fig = px.bar(df, x='month', y='value', color='year', barmode='group')
-        fig.update_layout(
-            title="Jahreswerte Exportierte Energie",
-            xaxis_title="Monat",
-            yaxis_title="",
-            legend_title="Jahr",
-            xaxis=dict(
-                tickmode="array",
-                tickvals=list(range(1, 13)),  # months 1-12
-                ticktext=["Jan", "Feb", "Mär", "Apr", "Mai", "Jun", "Jul", "Aug", "Sep", "Okt", "Nov", "Dez"]
-            ),
-            yaxis=dict(ticksuffix=" kWh")
-        )
-        graph_html = fig.to_html(full_html=False)
-        return render_template_string(f"""
-            <html>
-              <head>
-                <title>Jahreswerte PV-Energie</title>
-              </head>
-              <body>
-                {graph_html}
-              </body>
-            </html>
-        """)
-    except Exception as e:
-        raise Exception(f"Error when querying energy export values: {e}")
-    finally:
-        if dbh is not None:
-            dbh.close()
+from app import app
+
+import routes
+import debug_routes
+import pv_routes
+import ntp_routes
 
 if __name__ == '__main__':
-    app.run(port=8080)
+    app.run(host='0.0.0.0', port=8080)
 else:
     exposed_app = ProxyFix(app, x_for=1, x_host=1)
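ProxyFix(app, x_for=1, x_host=1) makes Flask trust one hop of X-Forwarded-For and X-Forwarded-Host, which matters behind the numbers.hottis.de ingress. A self-contained sketch (standalone demo app, not the repository code) showing the effect:

# Standalone demonstration of what x_for=1 and x_host=1 do.
from flask import Flask, request
from werkzeug.middleware.proxy_fix import ProxyFix
from werkzeug.test import Client

demo = Flask(__name__)

@demo.route("/whoami")
def whoami():
    # reports the client address and host as Flask sees them
    return f"{request.remote_addr} {request.host}"

client = Client(ProxyFix(demo.wsgi_app, x_for=1, x_host=1))
resp = client.get("/whoami", headers={
    "X-Forwarded-For": "203.0.113.7",
    "X-Forwarded-Host": "numbers.hottis.de",
})
print(resp.get_data(as_text=True))  # 203.0.113.7 numbers.hottis.de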