Compare commits

...

64 Commits

Author SHA1 Message Date
f63c22912a float fix 2
All checks were successful
ci/woodpecker/tag/woodpecker Pipeline was successful
2026-03-06 20:57:32 +01:00
c37420a993 float fix 1
All checks were successful
ci/woodpecker/tag/woodpecker Pipeline was successful
2026-03-06 20:51:50 +01:00
ac0c417a48 small j for float 3
All checks were successful
ci/woodpecker/tag/woodpecker Pipeline was successful
2026-03-06 20:34:21 +01:00
dc965aeba6 small j for float 2
All checks were successful
ci/woodpecker/tag/woodpecker Pipeline was successful
2026-03-06 20:27:28 +01:00
b940f715c0 small j for float
Some checks failed
ci/woodpecker/tag/woodpecker Pipeline failed
2026-03-06 20:25:04 +01:00
8c5626942f fix config 3
All checks were successful
ci/woodpecker/tag/woodpecker Pipeline was successful
2026-03-06 12:30:04 +01:00
6d0dc12ac1 fix config 2
All checks were successful
ci/woodpecker/tag/woodpecker Pipeline was successful
2026-03-06 11:28:28 +01:00
6a4aac4140 fix config
All checks were successful
ci/woodpecker/tag/woodpecker Pipeline was successful
2026-03-06 11:24:02 +01:00
77d23e39cf add new shellies 2
All checks were successful
ci/woodpecker/tag/woodpecker Pipeline was successful
2026-03-04 23:27:48 +01:00
e28042f3be add new shellies
All checks were successful
ci/woodpecker/tag/woodpecker Pipeline was successful
2026-03-04 23:25:59 +01:00
e1ad76f703 fix
All checks were successful
ci/woodpecker/tag/woodpecker Pipeline was successful
2026-03-04 12:10:29 +01:00
6dac149a48 influxdb url
All checks were successful
ci/woodpecker/tag/woodpecker Pipeline was successful
2026-03-04 11:21:55 +01:00
691ebdeadd prepare additional deployment
All checks were successful
ci/woodpecker/tag/woodpecker Pipeline was successful
2026-03-04 11:07:31 +01:00
6ef80f8438 z2m working again 2026-02-16 17:07:23 +01:00
47116904fc changes 2026-02-09 15:36:35 +01:00
5b46ecb0b1 fields and tags 2026-02-04 21:32:32 +01:00
a1ea1b230e communication with influxdb is working, schema of data in influxdb is unusable so far, too many spare columns 2026-02-04 17:19:12 +01:00
97679561d8 changes for influxdb 2026-02-04 14:58:13 +01:00
a78c6952f0 voltage hack
All checks were successful
ci/woodpecker/tag/woodpecker Pipeline was successful
2025-12-22 18:50:59 +01:00
3b69e1e2af car_values_v
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2025-12-15 17:28:15 +01:00
2dfca8d70a dockerize and deploy only for tag
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/tag/woodpecker Pipeline was successful
2025-12-15 14:55:49 +01:00
0352b720cd car and gpg
Some checks failed
ci/woodpecker/push/woodpecker Pipeline failed
2025-12-15 14:53:49 +01:00
95984157e8 add car powermeter 2025-12-15 14:19:18 +01:00
61509c0000 queries
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2025-02-10 15:40:37 +01:00
3c09c04066 rename snmp handler to prepared handler, 4
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/tag/woodpecker Pipeline was successful
2025-02-10 14:07:25 +01:00
af739c7148 rename snmp handler to prepared handler, 3
Some checks failed
ci/woodpecker/push/woodpecker Pipeline failed
2025-02-10 14:03:22 +01:00
33ff176c79 rename snmp handler to prepared handler, 2
Some checks failed
ci/woodpecker/push/woodpecker Pipeline failed
2025-02-10 14:01:50 +01:00
134e3706cc rename snmp handler to prepared handler
Some checks failed
ci/woodpecker/push/woodpecker Pipeline failed
2025-02-10 13:58:01 +01:00
3a56309b9f tsm
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/tag/woodpecker Pipeline was successful
2025-02-10 13:11:31 +01:00
084645f002 update of go modules
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/tag/woodpecker Pipeline was successful
2025-02-04 16:34:49 +01:00
9815371199 filter for build step
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/tag/woodpecker Pipeline was successful
2025-02-04 16:25:03 +01:00
4debe45592 fix new ci, 7
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/tag/woodpecker Pipeline was successful
2025-02-04 16:04:59 +01:00
71773968c9 fix new ci, 6
Some checks failed
ci/woodpecker/push/woodpecker Pipeline failed
2025-02-04 16:02:52 +01:00
574e2886f5 fix new ci, 5
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2025-02-04 15:42:02 +01:00
e25693fb84 fix new ci, 4
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2025-02-04 15:11:49 +01:00
ff49d285dc fix new ci, 3
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2025-02-04 15:10:14 +01:00
8c3977162b fix new ci, 2
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2025-02-04 15:07:31 +01:00
b99b47ca40 fix new ci, 1
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2025-02-04 15:05:53 +01:00
c40805b4cb change Dockerfile, introduce separate build step
Some checks failed
ci/woodpecker/push/woodpecker Pipeline failed
2025-02-04 15:01:49 +01:00
a2eb38b414 sbon, 6
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2025-02-03 17:30:05 +01:00
64cf45e22f sbon, 5
Some checks failed
ci/woodpecker/push/woodpecker Pipeline failed
2025-02-03 17:27:19 +01:00
9310a86687 sbon, 4
Some checks failed
ci/woodpecker/push/woodpecker Pipeline failed
2025-02-03 17:24:07 +01:00
f4b404e2b1 sbon, 3
Some checks failed
ci/woodpecker/push/woodpecker Pipeline failed
2025-02-03 17:20:18 +01:00
29148a13f4 sbon, 2
Some checks failed
ci/woodpecker/push/woodpecker Pipeline failed
2025-02-03 17:18:44 +01:00
0356e9dcee sbon
Some checks failed
ci/woodpecker/push/woodpecker Pipeline failed
2025-02-03 17:14:11 +01:00
a5b981357d enable license scanning, 2
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2025-02-03 16:51:16 +01:00
57bbc6135e enable license scanning
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2025-02-03 16:43:46 +01:00
d704f7ba5e python module updated too
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2025-02-03 14:38:14 +01:00
d0567a48f1 update modules
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2025-02-03 14:35:44 +01:00
ee22996433 sbom in ci, 3
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2025-02-03 14:26:07 +01:00
4130befdbf sbom in ci, 2
Some checks failed
ci/woodpecker/push/woodpecker Pipeline failed
2025-02-03 14:23:45 +01:00
77c5df0697 sbom in ci 2025-02-03 14:21:41 +01:00
d4ee4c49de new database
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/tag/woodpecker Pipeline was successful
2025-01-09 16:18:53 +01:00
ae938d10b9 debug
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/tag/woodpecker Pipeline was successful
2025-01-09 14:53:12 +01:00
799ef9e00b Merge branch 'main' of gitea.hottis.de:wn/universal-data-ingest
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/tag/woodpecker Pipeline was successful
2025-01-09 14:49:24 +01:00
311e732841 debug 2025-01-09 14:49:15 +01:00
51e482e94e fix
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2024-12-13 10:27:02 +01:00
a1b98d3438 more debugging
Some checks failed
ci/woodpecker/push/woodpecker Pipeline failed
2024-12-13 10:02:32 +01:00
166c414af1 more debugging for database issue
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/tag/woodpecker Pipeline was successful
2024-12-12 23:23:32 +01:00
cedb1dfa5a more debugging for database issue
Some checks failed
ci/woodpecker/push/woodpecker Pipeline failed
2024-12-12 23:21:09 +01:00
a21fae4f8a view
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2024-12-04 13:17:06 +01:00
1b6ac5d762 gy21, fix
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/tag/woodpecker Pipeline was successful
2024-12-04 12:52:03 +01:00
95831d5e47 gy21
Some checks failed
ci/woodpecker/push/woodpecker Pipeline failed
2024-12-04 12:49:49 +01:00
c3dce9faab disable debugging code
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/tag/woodpecker Pipeline was successful
2024-11-15 11:26:26 +01:00
58 changed files with 1279 additions and 1278 deletions

View File

@@ -1,32 +1,39 @@
when:
- event: tag
steps:
build:
image: golang:1.22.5-alpine3.20
commands:
- GOPATH=/woodpecker/go
- cd src/udi
- go mod tidy
- go build -a -installsuffix nocgo -o udi main.go
- cp udi ../..
dockerize:
image: plugins/kaniko
settings:
repo: ${FORGE_NAME}/${CI_REPO}
registry:
from_secret: container_registry
tags: latest,${CI_COMMIT_SHA},${CI_COMMIT_TAG}
from_secret: local_registry
tags: latest,${CI_COMMIT_TAG}
username:
from_secret: container_registry_username
from_secret: local_username
password:
from_secret: container_registry_password
from_secret: local_password
dockerfile: Dockerfile
when:
- event: [push, tag]
deploy:
image: portainer/kubectl-shell:latest
secrets:
- source: kube_config
target: KUBE_CONFIG_CONTENT
- source: encryption_key
target: ENCRYPTION_KEY
- source: secrets_checksum
target: MD5_CHECKSUM
image: quay.io/wollud1969/k8s-admin-helper:0.4.1
environment:
KUBE_CONFIG_CONTENT:
from_secret: kube_config
GPG_PASSPHRASE:
from_secret: gpg_passphrase
commands:
- export IMAGE_TAG=$CI_COMMIT_TAG
- printf "$KUBE_CONFIG_CONTENT" > /tmp/kubeconfig
- export KUBECONFIG=/tmp/kubeconfig
- ./deployment/deploy.sh
when:
- event: tag

View File

@@ -1,15 +1,8 @@
FROM golang:1.22.5-alpine3.20 as builder
RUN mkdir -p /go/src
COPY ./src/ /go/src
WORKDIR /go/src/udi
RUN go build -a -installsuffix nocgo -o udi main.go
FROM scratch
ENV UDI_CONF ""
COPY --from=builder /go/src/udi ./
COPY udi ./
ENTRYPOINT ["./udi"]

View File

@@ -0,0 +1,55 @@
package hottisGy21
import (
//"log"
"fmt"
"bytes"
"encoding/base64"
"encoding/binary"
"udi/database"
)
type hottisGy21Values struct {
Connected uint8
Status uint8
RawHumidity uint16
RawTemperature uint16
}
func Parse(fPort int, _ []byte, frmPayload string, variables *map[string]database.VariableType, attributes *map[string]interface{}, _ *database.Device) error {
if fPort != 2 {
return fmt.Errorf("Unexpected fPort %d", fPort)
}
b, err := base64.StdEncoding.DecodeString(frmPayload)
if err != nil {
return fmt.Errorf("Unable to base64-decode payload: %v", err)
}
var values hottisGy21Values
err = binary.Read(bytes.NewReader(b), binary.LittleEndian, &values)
if err != nil {
return fmt.Errorf("Unable to cast into struct: %v", err)
}
var temperature float32 = -46.85 + 175.72 * (float32(values.RawTemperature) / 65536.0)
var humidity float32 = -6 + 125 * (float32(values.RawHumidity) / 65536.0);
// log.Printf("CO2: %f, Temp: %f, Hum: %f, Status: %d", co2concentration, temperature, humidity, values.Status)
(*variables)["Humidity"] = database.VariableType {
Label: "Humidity",
Variable: "Humidity",
Unit: "%",
Value: humidity,
}
(*variables)["Temperature"] = database.VariableType {
Label: "Temperature",
Variable: "Temperature",
Unit: "°C",
Value: temperature,
}
(*attributes)["Status"] = values.Status
return nil
}

View File

@@ -2,7 +2,7 @@ package ttn
import (
"fmt"
//"log"
"log"
"time"
"encoding/json"
"udi/config"
@@ -14,6 +14,7 @@ import (
"udi/handlers/ttn/models/draginoLsn50"
"udi/handlers/ttn/models/rawPayloadPrinter"
"udi/handlers/ttn/models/hottisScd30"
"udi/handlers/ttn/models/hottisGy21"
"udi/handlers/ttn/models/hottisThreeWayThermometer"
"udi/database"
)
@@ -85,6 +86,7 @@ func New(id string, config config.HandlerConfigT) handler.Handler {
}
t.Id = id
t.dbh = database.NewDatabaseHandle()
log.Printf("Handler TTN %d initialized", id)
return t
}
@@ -153,6 +155,8 @@ func (self *TTNHandler) Handle(message handler.MessageT) {
parser = rawPayloadPrinter.Parse
case "hottis-scd30":
parser = hottisScd30.Parse
case "hottis-gy21":
parser = hottisGy21.Parse
case "hottis-threeway-thermometer":
parser = hottisThreeWayThermometer.Parse
default:

View File

@@ -1,43 +0,0 @@
#!/bin/bash
if [ "$ENCRYPTION_KEY" = "" ]; then
echo "ENCRYPTION_KEY not set"
exit 1
fi
if [ "$MD5_CHECKSUM" = "" ]; then
echo "No checksum given"
exit 1
fi
SECRETS_CIPHERTEXT_FILE=secrets.enc
SECRETS_PLAINTEXT_FILE=/tmp/secrets
TMP_FILE=`mktemp`
POD_NAME_SUFFIX=`date +%s`
cat $SECRETS_CIPHERTEXT_FILE | \
kubectl run openssl-$POD_NAME_SUFFIX \
--rm \
--image bitnami/debian-base-buildpack:latest \
--env KEY=$ENCRYPTION_KEY \
-i \
-q \
-- \
/bin/sh -c "openssl enc -aes-256-cbc -salt -pass env:KEY -a -d" > \
$TMP_FILE
if [ `uname` = "Darwin" ]; then
CALCULATED_CHECKSUM=`cat $TMP_FILE | md5`
elif [ `uname` = "Linux" ]; then
CALCULATED_CHECKSUM=`cat $TMP_FILE | md5sum - | awk '{print $1}'`
fi
if [ "$MD5_CHECKSUM" != "$CALCULATED_CHECKSUM" ]; then
echo "Invalid checksum"
exit 1
fi
# cat $TMP_FILE
mv $TMP_FILE $SECRETS_PLAINTEXT_FILE

View File

@@ -17,7 +17,7 @@ metadata:
labels:
app: udi
annotations:
secret.reloader.stakater.com/reload: "%PRE%-udi-conf,%PRE%-udi-db-cred,%PRE%-mqtt-password"
secret.reloader.stakater.com/reload: "%PRE%-udi-conf,%PRE%-udi-db-cred,%PRE%-mqtt-password,%PRE%-udi-influxdb-cred"
spec:
replicas: 1
selector:
@@ -36,6 +36,8 @@ spec:
name: %PRE%-udi-db-cred
- secretRef:
name: %PRE%-mqtt-password
- secretRef:
name: %PRE%-udi-influxdb-cred
- configMapRef:
name: %PRE%-udi-conf
volumeMounts:

View File

@@ -4,6 +4,11 @@ if [ "$IMAGE_TAG" == "" ]; then
echo "Make sure IMAGE_TAG is set"
exit 1
fi
if [ "$GPG_PASSPHRASE" == "" ]; then
echo "Make sure GPG_PASSPHRASE is set"
exit 1
fi
IMAGE_NAME=$FORGE_NAME/$CI_REPO
@@ -15,7 +20,10 @@ DEPLOYMENT_DIR=$PWD/deployment
INSTANCES_DIR=$DEPLOYMENT_DIR/instances
pushd $DEPLOYMENT_DIR > /dev/null
./decrypt-secrets.sh || exit 1
# ./decrypt-secrets.sh || exit 1
# . /tmp/secrets
gpg --decrypt --yes --batch --passphrase "$GPG_PASSPHRASE" --homedir /tmp/.gnupg -o /tmp/secrets secrets.asc
. /tmp/secrets
rm /tmp/secrets
popd > /dev/null
@@ -58,7 +66,8 @@ for NAMESPACE_DIR in `find $INSTANCES_DIR -type d -mindepth 1 -maxdepth 1`; do
NEW_UDI_DB_PASSWORD="${!PASSWORD_VARIABLE}"
DATABASE_VARIABLE=$VARIABLE_PREFIX"_PGDATABASE"
NEW_UDI_DB_DATABASE="${!DATABASE_VARIABLE}"
NEW_UDI_DB_HOST=timescaledb.database.svc.cluster.local
NEW_UDI_DB_HOST=database.database1.svc.cluster.local
INFLUXDB_URL=$VARIABLE_PREFIX"_INFLUXDB_URL"
kubectl create secret generic $INSTANCE-udi-db-cred \
--dry-run=client \
@@ -71,6 +80,13 @@ for NAMESPACE_DIR in `find $INSTANCES_DIR -type d -mindepth 1 -maxdepth 1`; do
--from-literal=PGSSLMODE="require" | \
kubectl apply -f - -n $NAMESPACE
kubectl create secret generic $INSTANCE-udi-influxdb-cred \
--dry-run=client \
-o yaml \
--save-config \
--from-literal=INFLUXDB_URL="${!INFLUXDB_URL}" | \
kubectl apply -f - -n $NAMESPACE
# set configuration as configMap
kubectl create configmap $INSTANCE-udi-conf \
--from-literal=UDI_CONF="`cat $CONFIG_FILE`" \

View File

@@ -1,27 +0,0 @@
#!/bin/bash
ENCRYPTION_KEY=`openssl rand -hex 32`
echo $ENCRYPTION_KEY
SECRETS_PLAINTEXT_FILE=secrets.txt
SECRETS_CIPHERTEXT_FILE=secrets.enc
if [ `uname` = "Darwin" ]; then
cat $SECRETS_PLAINTEXT_FILE | md5
elif [ `uname` = "Linux" ]; then
cat $SECRETS_PLAINTEXT_FILE | md5sum - | awk '{print $1}'
fi
POD_NAME_SUFFIX=`date +%s`
cat $SECRETS_PLAINTEXT_FILE | \
kubectl run openssl-$POD_NAME_SUFFIX \
--rm \
--image bitnami/debian-base-buildpack:latest \
--env KEY=$ENCRYPTION_KEY \
-i \
-q \
-- \
/bin/sh -c "openssl enc -aes-256-cbc -salt -pass env:KEY -a" > \
$SECRETS_CIPHERTEXT_FILE

View File

@@ -0,0 +1,226 @@
{
"mqtt": {
"broker": "mqtt://mosquitto-broker-mqtt-anon-cluster.mosquitto.svc.cluster.local:1883",
"tlsEnable": "false"
},
"topicMappings": [
{
"topics": [ "snmp" ],
"handler": "PREP",
"id": "SNMP",
"config": {
"attributes": {
}
}
},
{
"topics": [ "tsm" ],
"handler": "PREP",
"id": "TSM",
"config": {
"attributes": {
}
}
},
{
"topics": [ "dt1/ai/periodic/1" ],
"handler": "DT1T",
"id": "DT1T.0",
"config": {
"attributes": {
"Application": "Temperature Wago",
"Device": "Freezer",
"HardLow": "-273",
"SoftLow": "-50",
"SoftHigh": "20",
"HardHigh": "100"
}
}
},
{
"topics": [ "dt1/ai/periodic/3" ],
"handler": "DT1T",
"id": "DT1T.1",
"config": {
"attributes": {
"Application": "Temperature Wago",
"Device": "Outdoor",
"HardLow": "-273",
"SoftLow": "-60",
"SoftHigh": "60",
"HardHigh": "100"
}
}
},
{
"topics": [ "IoT/PV/Values" ],
"handler": "PV",
"id": "PV",
"config": {
"databaseConnStr": "",
"attributes": {
}
}
},
{
"topics": [ "IoT/Car/Values" ],
"handler": "Car",
"id": "Car",
"config": {
"databaseConnStr": "",
"attributes": {
}
}
},
{
"topics": [ "locative/event/#" ],
"handler": "Locative",
"id": "Locative",
"config": {
"databaseConnStr": "",
"attributes": {
}
}
},
{
"topics": [ "IoT/MBGW3/Measurement" ],
"handler": "MBGW3",
"id": "MBGW3",
"config": {
"databaseConnStr": "",
"attributes": {
}
}
},
{
"topics": [ "IoT/OneWireGW/Bus 1/#" ],
"handler": "SVER",
"id": "SVER0",
"config": {
"databaseConnStr": "",
"attributes": {
"application": "Temperature Heating",
"payloadRegex": "(\\d+(\\.\\d+)?)\\s*([^0-9\\s]\\S*)",
"deviceFrom": "topic",
"devicePart": "3",
"valueFrom": "payload",
"valuePart": "1",
"valueType": "float",
"unitFrom": "payload",
"unitPart": "3"
}
}
},
{
"topics": [ "NR/Multisensor/+/Temperatur" ],
"handler": "SVEJ",
"id": "SVEJ0",
"config": {
"databaseConnStr": "",
"attributes": {
"application": "Temperature Multisensor",
"deviceSelector": "T:2",
"valueSelector": "J:$.CurrentTemperature",
"unitSelector": "C:°C"
}
}
},
{
"topics": [ "NR/Multisensor/+/Feuchte" ],
"handler": "SVEJ",
"id": "SVEJ1",
"config": {
"databaseConnStr": "",
"attributes": {
"application": "Humidity Multisensor",
"deviceSelector": "T:2",
"valueSelector": "J:$.CurrentRelativeHumidity",
"unitSelector": "C:%"
}
}
},
{
"topics": [ "zigbee2mqtt/+" ],
"handler": "Z2M",
"id": "Z2M",
"config": {
"databaseConnStr": "",
"attributes": {
}
}
},
{
"topics": [ "shellyplusht/+/status/temperature:0" ],
"handler": "SVEJ",
"id": "SVEJ2",
"config": {
"databaseConnStr": "",
"attributes": {
"application": "Temperature Shelly Plus HT",
"deviceSelector": "T:1",
"valueSelector": "J:$.tC",
"unitSelector": "C:°C"
}
}
},
{
"topics": [ "shellyplusht/+/status/humidity:0" ],
"handler": "SVEJ",
"id": "SVEJ3",
"config": {
"databaseConnStr": "",
"attributes": {
"application": "Humidity Shelly Plus HT",
"deviceSelector": "T:1",
"valueSelector": "J:$.rh",
"unitSelector": "C:%"
}
}
},
{
"topics": [ "shellies/sensor/+/status/temperature:0" ],
"handler": "SVEJ",
"id": "SVEJ4",
"config": {
"databaseConnStr": "",
"attributes": {
"application": "Shellies Sensor Temperature",
"deviceSelector": "T:2",
"valueSelector": "J:$.tC",
"unitSelector": "C:°C"
}
}
},
{
"topics": [ "shellies/sensor/+/status/humidity:0" ],
"handler": "SVEJ",
"id": "SVEJ5",
"config": {
"databaseConnStr": "",
"attributes": {
"application": "Shellies Sensor Humidity",
"deviceSelector": "T:2",
"valueSelector": "J:$.rh",
"unitSelector": "C:%"
}
}
},
{
"topics": [ "shellies/sensor/+/status/devicepower:0" ],
"handler": "SVEJ",
"id": "SVEJ6",
"config": {
"databaseConnStr": "",
"attributes": {
"application": "Shellies Sensor Power",
"deviceSelector": "T:2",
"valueSelector": "J:$.battery.percent",
"unitSelector": "C:%"
}
}
}
],
"archiver": {
"dir": "/archive"
}
}

8
deployment/secrets.asc Normal file
View File

@@ -0,0 +1,8 @@
-----BEGIN PGP MESSAGE-----
jA0ECQMIYUoTHR96Qfb90psBoxuk38UXPXTWPCmdW690bi2+w34S4NLHZvHfe3Ra
nck319+PXvr0agfHGZ733hhTQv4sa8I2o6ICrgFqtKGfHmgnqL5kYNP9+NuV/IsF
x3dxwjEejsZ5GYn/zk+CQceItQ8nyyJc2ms1KwTu2r4hMzuHmnVtvKxNCzPrw2N5
SJIRhh41eequFkzELQqqXXu10raBFsttOemVhA==
=TySu
-----END PGP MESSAGE-----

View File

@@ -1,38 +0,0 @@
U2FsdGVkX1+v6L4gc+CbYCZyo/UVN7QfmEntIBpk+GAHGf3d7m/4hfcYd39Eh2td
lXSmNdt1cdFw/UfZ1x1OlGm/fqLh/j/rWPgEc6BwEcDFDEXpTucTjUHNDonYNH8j
eDWeAGokfguqgQG16CBLHdeyocP0kTPJSrIKQgG1Mzzck/kfB1Z6Ggv4z5KEx2dy
2rrnm+BeFT1yITwoxa3iJeudcSQznNIqQa+Mx4fUsPV+yorahp4gs0PVVj9POnAT
yRhpQgkaq5oZNVcYrWS5+6mmhbzL5jIAa4wfzVep/69RcfBkV5Oj5JJGaQzH0T74
wg8dWz/scdi2kkCn0KroJPrsG/lAsFYhbX4kUJQeRUX1pWr/iwD0i8LRx+f2C82Y
HgpsnG6c5nPRy68TltgRgCRAIJj87rR/fATVowcpChfe9sXCwfLEZ5Q2hDK8eAPW
VS87axMkProyHJZe1GK0v9CAVWpXlxv6eAr8u2SftGA87Xu3ebQ4SjReXIcAb7M6
08UnxW4YcfH+usgU2GUuNlzRctAq334AfBWYQO51l/ELJAzaDi6Ht4Czr6R7Bsfh
M3ZcjcgqY7j7ywDFmKq/a8Q0Dsjm2sezNtrrRWusomgSKFEf8WncOdkcWOAiza4T
+Qubfr1SuZuWFF+migGtYM3X8YS+VpmMRIpJ1otibMELgjvldWGqHIK1uIThLq7F
MvQ0Nog6UNg79/8vrUoEUPPB5fQsXcNC5zcpVMrpJcGogBHhsXk1EPFcB75sx/65
bl2BZlCBacH9MNIBPh17dMC46EV1FNaLiO1N3/qJkxrkiG5wBDjDlnyMn/mYc/o1
olNuIO0nnn2x8ZU02lRo8RqcqOywseZeBhAzOj+899n5Qa/0YQAnb0Y9WAxqLft/
0C45HcK5Kgd3C6wqvVUqcQ/UMxQzv0y1cM8gbfpGjUvJ6gUj7vkW08D55A6gV8Lf
SrneWAP/1B1mmV16vHaXwoYpTpQwM7i7fHWBOpH7nq6E+0P3LHyon43dYo4P6KM7
He3R6phTFp36WI4ZCUQafTDZS196Ol2ZyEAonVwSOIEIyptXeoAmleolXC/eL84Z
bEbhld8g+ulrVSrBXFpCY3jBsqPVBYEpZaGYgevsrHPSbwWa/qQkTKnOO4+oz6Pe
9iJ1yJbSWfg6Gkr6iqE41Dp4VGXtwTDHHb9YMd56iWHAkxZLFIWdYUr8XfQS6j70
j5kV3jV/w5EHGYruBdtxAWc7YKq3pfqvh9R7dD/8JOFZhA140+zmOCWG4qdDhv+5
F9vlawudssa9ZHGi1jBFPCNW13LBhUdyCY3apKF4HHeeuA465uzxIqwtkJSigdun
vC9ooYZrJjYOnJSTJnKH0WSD0pPC6CIkge+Fxuksq6cst5Zcysw1xz5zs7UNeAP+
kLs1+8Kn2d1hJuzSWdWlj7xGratLEdA6pqcfBKvMYtY0kpPPDrxm+F1FZ7LyV+dc
G1vfI6aS2azrFrBNXSeOArJ/erGHIGhWxFY0c3bcGOjXwsLWRjQ03Kdj9ffj6UFL
4JJaI0I01RilAo+woaZhNmOHl1VxSsU1lDGF7IvW3t0qKLaSg/Rv3pQqdKyjq8I5
IxPlUEMdo1EDZZx4qLmYBM1tWhgMbn4nx6P2BS7obnPdaf3B0RPxI68Z49RYZKvR
/wTyr7oWCCRQDwCuVH8t/jUrSWspzEK7ApXHdh7T9JlNurFW7oxc8ylooQrAn3Gn
mru7X3cUeVtiosAklZ7w+JNxm44IRmDKNVDeAaat+q35EA8MRFGiuXEOeNw54tWH
zNkUyUJ79Ie7BkGrZFUFqkvfY3Q/xLaBGYDQe65S8/rerybL0YI7RmMiz4x7yq8L
GoIDwPsn0z/AFefoGTi0tAXZeC+EA62okK1kKR9qrh9gmD59uiMbFX1BHe3rWhgP
cCPScYeameXV3K6wwQpX8JTdptqMAH5cpEVoUZ/PZZpkaiCuWcMODVbqTpm4SRPt
Q9s5+6/g0TUUqz7Fwi0dlfnMZVuK0a1Uf/SBYR7f/UYVLfF5juTZ+IRJwQWwp6QX
CzfYms0W34/srtM72mQOpKTd0o3xuFyVbQtZPOpNghIjArQqwt34nEzXPYHqasDx
c/yIPdW+B/YVcFPdRV16Izqmjdlupv6pPjY/T6GdHczQsH9gD28HN9+Ka2Cvficf
evO7IXe0RuvodQ3tB4LmeWoJB10G7Sko2EEfpFTDXke9Ak/5cGrpdPMtbXCAIm1o
B5UhrqNuUYSWdo0mGttbSjFR7pyLujsxLNnp8teBi33QOUhrSId5+mOvtFDGiZKa
QCC+W+BIh6IFIwnxH4dDxjz3M65NXzqNV+6mXEFU77cX+oTF4BRe0R/L4nPoaBAN
smRxtqBItpVFUdsOVb6bXg==

View File

@@ -1 +0,0 @@
.venv

View File

@@ -1,38 +0,0 @@
import psycopg2
from loguru import logger
try:
srcConn = psycopg2.connect(database="level_monitoring_berresheim")
srcConn.autocommit = False
destConn = psycopg2.connect(database="udi-berresheim")
destConn.autocommit = False
with srcConn.cursor() as srcCur, destConn.cursor() as destCur:
srcCur.execute("select time, application_name, raw_level, level, status, battery from measurement_t")
for srcObj in srcCur:
timestamp = srcObj[0]
deviceName = srcObj[1]
rawLevel = srcObj[2]
level = srcObj[3]
status = srcObj[4]
battery = srcObj[5]
logger.info(f"{timestamp=}, {deviceName=}, {rawLevel=}, {level=}, {status=}, {battery=}")
destTime = timestamp
destApplication = "de-hottis-level-monitoring"
destDevice = "eui-a84041a2c18341d6"
destAttributes = '{"ApplicationId":"de-hottis-level-monitoring", "DeviceType":"dragino-ldds75", "Status":"' + status + '","Hint": "Migrated"}'
destValues = '{"Battery":{"unit":"V","label":"Battery","value":' + str(battery) + ',"variable":"Voltage"}, "Distance":{"unit":"mm","label":"Distance","variable":"Level","value":' + str(rawLevel) + '}, "CorrectedDistance":{"unit":"mm", "label":"CorrectedDistance", "variable":"Level","value":' + str(level) + '}}'
logger.info(f"{destTime=}, {destApplication=}, {destDevice=}, {destAttributes=}, {destValues=}")
destCur.execute("insert into measurements (time, application, device, attributes, values) values(%s, %s, %s, %s, %s)",
(destTime, destApplication, destDevice, destAttributes, destValues))
destConn.commit()
finally:
if srcConn:
srcConn.close()
if destConn:
destConn.close()

View File

@@ -1,79 +0,0 @@
import psycopg2
from loguru import logger
import os
srcPgHost = os.environ["SRC_PGHOST"]
srcPgUser = os.environ["SRC_PGUSER"]
srcPgPassword = os.environ["SRC_PGPASSWORD"]
srcPgDatabase = os.environ["SRC_PGDATABASE"]
destPgHost = os.environ["DEST_PGHOST"]
destPgUser = os.environ["DEST_PGUSER"]
destPgPassword = os.environ["DEST_PGPASSWORD"]
destPgDatabase = os.environ["DEST_PGDATABASE"]
try:
srcConn = psycopg2.connect(
host=srcPgHost,
dbname=srcPgDatabase,
user=srcPgUser,
password=srcPgPassword,
sslmode='require'
)
srcConn.autocommit = False
destConn = psycopg2.connect(
host=destPgHost,
dbname=destPgDatabase,
user=destPgUser,
password=destPgPassword,
sslmode='require'
)
destConn.autocommit = False
with srcConn.cursor() as srcCur, destConn.cursor() as destCur:
srcCur.execute("select time, deviceid, status, state, importenergyactive, importenergyreactive, exportenergyactive, exportenergyreactive, powerapparent, poweractive, powerreactive, powerdemandpositive, powerdemandreverse, factor, angle, voltage, current, powerdemand from pv_power_measurement_t order by time")
for srcObj in srcCur:
timestamp = srcObj[0]
deviceName = srcObj[1]
status = srcObj[2]
state = srcObj[3]
importenergyactive = srcObj[4]
importenergyreactive = srcObj[5]
exportenergyactive = srcObj[6]
exportenergyreactive = srcObj[7]
powerapparent = srcObj[8]
poweractive = srcObj[9]
powerreactive = srcObj[10]
powerdemandpositive = srcObj[11]
powerdemandreverse = srcObj[12]
factor = srcObj[13]
angle = srcObj[14]
voltage = srcObj[15]
current = srcObj[16]
powerdemand = srcObj[17]
logger.info(f"{timestamp=}, {deviceName=}")
destTime = timestamp
destApplication = "PV"
destDevice = "Powermeter"
destAttributes = f"{{\"ApplicationId\":\"PV\", \"Status\":\"{status}\",\"Hint\": \"Migrated\"}}"
destValues = f"{{\"Cnt\": {{\"unit\": \"\", \"label\": \"\", \"value\": \"-1\", \"variable\": \"Cnt\"}}, \"Angle\": {{\"unit\": \"degree\", \"label\": \"\", \"value\": \"{angle}\", \"variable\": \"Angle\"}}, \"State\": {{\"unit\": \"\", \"label\": \"\", \"value\": \"{state}\", \"variable\": \"State\"}}, \"Factor\": {{\"unit\": \"\", \"label\": \"\", \"value\": \"{factor}\", \"variable\": \"Factor\"}}, \"Current\": {{\"unit\": \"A\", \"label\": \"\", \"value\": \"{current}\", \"variable\": \"Current\"}}, \"Voltage\": {{\"unit\": \"V\", \"label\": \"\", \"value\": \"{voltage}\", \"variable\": \"Voltage\"}}, \"PowerActive\": {{\"unit\": \"W\", \"label\": \"\", \"value\": \"{poweractive}\", \"variable\": \"PowerActive\"}}, \"PowerApparent\": {{\"unit\": \"VA\", \"label\": \"\", \"value\": \"{powerapparent}\", \"variable\": \"PowerApparent\"}}, \"PowerReactive\": {{\"unit\": \"VA\", \"label\": \"\", \"value\": \"{powerreactive}\", \"variable\": \"PowerReactive\"}}, \"ExportEnergyActive\": {{\"unit\": \"Wh\", \"label\": \"\", \"value\": \"{exportenergyactive}\", \"variable\": \"ExportEnergyActive\"}}, \"ImportEnergyActive\": {{\"unit\": \"Wh\", \"label\": \"\", \"value\": \"{importenergyactive}\", \"variable\": \"ImportEnergyActive\"}}, \"PowerDemandReverse\": {{\"unit\": \"W\", \"label\": \"\", \"value\": \"{powerdemandreverse}\", \"variable\": \"PowerDemandReverse\"}}, \"PowerDemandPositive\": {{\"unit\": \"W\", \"label\": \"\", \"value\": \"{powerdemandpositive}\", \"variable\": \"PowerDemandPositive\"}}, \"ExportEnergyReactive\": {{\"unit\": \"VAh\", \"label\": \"\", \"value\": \"{exportenergyreactive}\", \"variable\": \"ExportEnergyReactive\"}}, \"ImportEnergyReactive\": {{\"unit\": \"VAh\", \"label\": \"\", \"value\": \"{importenergyreactive}\", \"variable\": \"ImportEnergyReactive\"}}}}"
logger.info(f"{destTime=}, {destApplication=}, {destDevice=}, {destAttributes=}, {destValues=}")
try:
destCur.execute("insert into measurements (time, application, device, attributes, values) values(%s, %s, %s, %s, %s)",
(destTime, destApplication, destDevice, destAttributes, destValues))
destConn.commit()
except Exception as e:
destConn.rollback()
logger.error(f"Error {e} when inserted time {destTime}")
finally:
if srcConn:
srcConn.close()
if destConn:
destConn.close()

View File

@@ -1,78 +0,0 @@
import psycopg2
from loguru import logger
import os
srcPgHost = os.environ["SRC_PGHOST"]
srcPgUser = os.environ["SRC_PGUSER"]
srcPgPassword = os.environ["SRC_PGPASSWORD"]
srcPgDatabase = os.environ["SRC_PGDATABASE"]
destPgHost = os.environ["DEST_PGHOST"]
destPgUser = os.environ["DEST_PGUSER"]
destPgPassword = os.environ["DEST_PGPASSWORD"]
destPgDatabase = os.environ["DEST_PGDATABASE"]
try:
srcConn = psycopg2.connect(
host=srcPgHost,
dbname=srcPgDatabase,
user=srcPgUser,
password=srcPgPassword,
sslmode='require'
)
srcConn.autocommit = False
destConn = psycopg2.connect(
host=destPgHost,
dbname=destPgDatabase,
user=destPgUser,
password=destPgPassword,
sslmode='require'
)
destConn.autocommit = False
with srcConn.cursor() as srcCur, destConn.cursor() as destCur:
srcCur.execute("select time, location, status, temperature, category from room_climate_measurement_t where category = 'heating' and time > '2023-12-19 05:20:00' order by time")
for srcObj in srcCur:
timestamp = srcObj[0]
location = srcObj[1]
status = srcObj[2]
temperature = srcObj[3]
category = srcObj[4]
logger.info(f"{timestamp=}, {location=}, {status=}, {temperature=}, {category=}")
destTime = timestamp
match category:
case 'heating':
destApplication = 'Temperature Heating'
case 'Outdoor':
destApplication = 'Temperature Wago'
case 'Device':
destApplication = 'Temperature Wago'
case 'Indoor':
destApplication = 'Temperature Multisensor' if location != 'Anna-Koeln-2' else 'Temperature Shelly Plus HT'
case 'Special':
destApplication = 'Temperature Multisensor'
destDevice = location
destAttributes = '{"ApplicationId":"temperature-imported", "Status":"' + status + '","Location":"' + location + '","Category":"' + category + '","Hint": "Migrated"}'
destValues = '{"Value": {"unit": "°C", "label": "", "value": "' + str(temperature) + '", "variable": ""}}'
logger.info(f"{destTime=}, {destApplication=}, {destDevice=}, {destAttributes=}, {destValues=}")
try:
destCur.execute("insert into measurements (time, application, device, attributes, values) values(%s, %s, %s, %s, %s)",
(destTime, destApplication, destDevice, destAttributes, destValues))
destConn.commit()
except Exception as e:
destConn.rollback()
logger.error(f"Error {e} when inserted time {destTime}")
finally:
if srcConn:
srcConn.close()
if destConn:
destConn.close()

View File

@@ -1,2 +0,0 @@
loguru==0.7.2
psycopg2==2.9.9

View File

@@ -118,3 +118,65 @@ create or replace view router_v as
cast(values->'wan-out'->>'value' as int) as wanOutOctetsPerSeconds
from measurements
where application = 'SNMP' and device = '172.16.3.1';
create or replace view lora_sht21_v as
select time,
cast(values->'Humidity'->>'value' as float) as humidity,
cast(values->'Temperature'->>'value' as float) as temperature,
m.device as device,
d.attributes->>'Label' as label
from measurements m, devices d
where m.application = 'de-hottis-app01' and
m.attributes->>'DeviceType' = 'hottis-gy21' and
m.device = d.label;
create or replace view ntp_server_snmp_v as
select time,
device,
cast(values->'load1'->>'value' as float) as laLoad1,
cast(values->'lan-in'->>'value' as int) as lanInOctetsPerSeconds,
cast(values->'lan-out'->>'value' as int) as lanOutOctetsPerSeconds
from measurements
where application = 'SNMP' and device = '172.16.13.10';
-- ntp_server_variables_v: NTP root dispersion of the NTP server
-- (172.16.13.10), taken from measurements of the 'TSM' application.
create or replace view ntp_server_variables_v as
select time,
device,
cast(values->'rootdisp'->>'value' as float) as rootdisp
from measurements
where application = 'TSM' and device = '172.16.13.10';
-- Status string `unit:"" json:"status"`
-- Timestamp string `unit:"" json:"timestamp"`
-- VoltageL1 float32 `unit:"V" json:"voltageL1"`
-- VoltageL2 float32 `unit:"V" json:"voltageL2"`
-- VoltageL3 float32 `unit:"V" json:"voltageL3"`
-- CurrentL1 float32 `unit:"A" json:"currentL1"`
-- CurrentL2 float32 `unit:"A" json:"currentL2"`
-- CurrentL3 float32 `unit:"A" json:"currentL3"`
-- PowerL1 float32 `unit:"W" json:"powerL1"`
-- PowerL2 float32 `unit:"W" json:"powerL2"`
-- PowerL3 float32 `unit:"W" json:"powerL3"`
-- TotalImportEnergy float32 `unit:"Wh" json:"totalImportEnergy"`
-- TotalExportEnergy float32 `unit:"Wh" json:"totalExportEnergy"`
-- Cnt int `unit:"" json:"cnt"`
-- car_values_v: per-phase voltage/current/power plus the import/export
-- energy counters and status of the 'Car' powermeter. The JSON keys
-- (VoltageL1, ...) mirror the field names of the Go CarValue struct that
-- produces these measurements (see comment block above).
create or replace view car_values_v as
select time,
cast(values->'VoltageL1'->>'value' as float) as voltage_l1,
cast(values->'VoltageL2'->>'value' as float) as voltage_l2,
cast(values->'VoltageL3'->>'value' as float) as voltage_l3,
cast(values->'CurrentL1'->>'value' as float) as current_l1,
cast(values->'CurrentL2'->>'value' as float) as current_l2,
cast(values->'CurrentL3'->>'value' as float) as current_l3,
cast(values->'PowerL1'->>'value' as float) as power_l1,
cast(values->'PowerL2'->>'value' as float) as power_l2,
cast(values->'PowerL3'->>'value' as float) as power_l3,
cast(values->'TotalImportEnergy'->>'value' as float) as total_import_energy,
cast(values->'TotalExportEnergy'->>'value' as float) as total_export_energy,
values->'Status'->>'value' as status,
device
from measurements
where application = 'Car';

View File

@@ -41,3 +41,4 @@ create or replace view cubecell_threeway_battery_v as
from measurements
where application = 'de-hottis-saerbeck-monitoring' and
device = 'eui-70b3d57ed0068fa4';

View File

@@ -1,18 +1,27 @@
{
"mqtt": {
"broker": "mqtt://emqx01-anonymous-cluster-internal.broker.svc.cluster.local:1883",
"broker": "mqtt://172.23.1.102:1883",
"tlsEnable": "false"
},
"topicMappings": [
{
"topics": [ "snmp" ],
"handler": "SNMP",
"handler": "PREP",
"id": "SNMP",
"config": {
"attributes": {
}
}
},
{
"topics": [ "tsm" ],
"handler": "PREP",
"id": "TSM",
"config": {
"attributes": {
}
}
},
{
"topics": [ "dt1/ai/periodic/1" ],
"handler": "DT1T",
@@ -54,9 +63,9 @@
}
},
{
"topics": [ "locative/event/#" ],
"handler": "Locative",
"id": "Locative",
"topics": [ "IoT/Car/Values" ],
"handler": "Car",
"id": "Car",
"config": {
"databaseConnStr": "",
"attributes": {
@@ -91,34 +100,6 @@
}
}
},
{
"topics": [ "NR/Multisensor/+/Temperatur" ],
"handler": "SVEJ",
"id": "SVEJ0",
"config": {
"databaseConnStr": "",
"attributes": {
"application": "Temperature Multisensor",
"deviceSelector": "T:2",
"valueSelector": "J:$.CurrentTemperature",
"unitSelector": "C:°C"
}
}
},
{
"topics": [ "NR/Multisensor/+/Feuchte" ],
"handler": "SVEJ",
"id": "SVEJ1",
"config": {
"databaseConnStr": "",
"attributes": {
"application": "Humidity Multisensor",
"deviceSelector": "T:2",
"valueSelector": "J:$.CurrentRelativeHumidity",
"unitSelector": "C:%"
}
}
},
{
"topics": [ "zigbee2mqtt/+" ],
"handler": "Z2M",
@@ -159,6 +140,6 @@
}
],
"archiver": {
"dir": "/archive"
"dir": "./tmp/udi"
}
}

View File

@@ -5,23 +5,19 @@
},
"topicMappings": [
{
"topics": [ "NR/Multisensor/+/Temperatur" ],
"handler": "SVEJ",
"id": "SVEJ0",
"topics": [ "IoT/PV/Values" ],
"handler": "PV",
"id": "PV",
"config": {
"databaseConnStr": "",
"attributes": {
"application": "Temperature Multisensor",
"deviceSelector": "T:2",
"valueSelector": "J:$.CurrentTemperature",
"unitSelector": "C:°C"
}
}
},
{
"topics": [ "zigbee2mqtt/+" ],
"handler": "Z2M",
"id": "Z2M",
"topics": [ "IoT/Car/Values" ],
"handler": "Car",
"id": "Car",
"config": {
"databaseConnStr": "",
"attributes": {

View File

@@ -1,46 +1,38 @@
package database
import "time"
import "gorm.io/gorm"
type VariableType struct {
Label string `json:"label"`
Variable string `json:"variable"`
Unit string `json:"unit"`
Value interface{} `json:"value,omitempty"`
Label string `json:"label"`
Variable string `json:"variable"`
Unit string `json:"unit"`
Value interface{} `json:"value,omitempty"`
Status string `json:"status,omitempty"`
}
type Measurement struct {
Time time.Time `gorm:"not null;primary_key"`
Application string `gorm:"not null"`
Device string
Attributes map[string]interface{} `gorm:"serializer:json;type:jsonb"`
Values map[string]VariableType `gorm:"serializer:json;type:jsonb"`
Time time.Time
Application string
Device string
Attributes map[string]interface{}
Values map[string]VariableType
}
// Simplified structures for backward compatibility
type DeviceType struct {
Label string
ModelIdentifier string
Attributes map[string]interface{}
}
type Application struct {
gorm.Model
Label string `gorm:"not null"`
Attributes map[string]interface{} `gorm:"serializer:json;type:jsonb"`
}
type DeviceType struct {
gorm.Model
Label string `gorm:"not null"`
ModelIdentifier string
Attributes map[string]interface{} `gorm:"serializer:json;type:jsonb"`
Label string
Attributes map[string]interface{}
}
type Device struct {
gorm.Model
Label string `gorm:"not null;uniqueIndex:idx_label_application_id"`
ApplicationID int `gorm:"not null;uniqueIndex:idx_label_application_id"`
Application Application
DeviceTypeID int `gorm:"not null"`
DeviceType DeviceType
Attributes map[string]interface{} `gorm:"serializer:json;type:jsonb"`
Label string
Application Application
DeviceType DeviceType
Attributes map[string]interface{}
}

View File

@@ -1,96 +1,155 @@
package database
import (
"log"
//"time"
"fmt"
"udi/counter"
"gorm.io/driver/postgres"
"gorm.io/gorm"
"fmt"
"log"
"os"
"udi/counter"
influxdb "github.com/influxdata/influxdb1-client/v2"
)
type DatabaseHandle struct {
initialized bool
dbh *gorm.DB
initialized bool
client influxdb.Client
database string
}
// NewDatabaseHandle creates a handle backed by InfluxDB. Configuration is
// read from the INFLUXDB_URL, INFLUXDB_DATABASE, INFLUXDB_USER and
// INFLUXDB_PASSWORD environment variables, with local-development defaults
// for URL and database. On any failure the handle is returned with
// initialized == false; StoreMeasurement then drops (and counts) messages.
func NewDatabaseHandle() *DatabaseHandle {
	var db DatabaseHandle
	influxURL := os.Getenv("INFLUXDB_URL")
	if influxURL == "" {
		influxURL = "http://localhost:8086"
	}
	influxDB := os.Getenv("INFLUXDB_DATABASE")
	if influxDB == "" {
		influxDB = "udi"
	}
	username := os.Getenv("INFLUXDB_USER")
	password := os.Getenv("INFLUXDB_PASSWORD")
	client, err := influxdb.NewHTTPClient(influxdb.HTTPConfig{
		Addr:     influxURL,
		Username: username,
		Password: password,
	})
	if err != nil {
		// Fixed: log the URL (the old message passed the database name into
		// the URL slot) and never log the password in clear text.
		log.Printf("Unable to create InfluxDB client (config: URL: %s, Username: %s): %s", influxURL, username, err)
		db.initialized = false
		return &db
	}
	// Verify the server is reachable before declaring the handle usable;
	// a timeout of 0 means the ping blocks without a deadline.
	_, _, err = client.Ping(0)
	if err != nil {
		log.Printf("Unable to ping InfluxDB: %s", err)
		db.initialized = false
		client.Close()
		return &db
	}
	db.client = client
	db.database = influxDB
	db.initialized = true
	log.Printf("InfluxDB connection opened (URL: %s, Database: %s)", influxURL, influxDB)
	return &db
}
// StoreMeasurement writes one measurement as a single InfluxDB point:
// Application becomes the measurement name, Device (if set) a tag, and
// every entry of Values and Attributes a field. Failures are logged and
// recorded on the "Stored" failure counter; there is no retry, the
// measurement is lost. (The stale gorm-based code paths that were still
// interleaved here have been removed; only the InfluxDB path remains.)
func (self *DatabaseHandle) StoreMeasurement(measurement *Measurement) {
	if !self.initialized {
		log.Printf("Database connection not initialized, can not store, measurement %v lost", measurement)
		counter.F("Stored")
		return
	}
	// One batch per call, carrying exactly one point; second precision.
	bp, err := influxdb.NewBatchPoints(influxdb.BatchPointsConfig{
		Database:  self.database,
		Precision: "s",
	})
	if err != nil {
		log.Printf("Unable to create batch points: %s", err)
		counter.F("Stored")
		return
	}
	// Tags: only the device, and only when it is non-empty.
	tags := make(map[string]string)
	if measurement.Device != "" {
		tags["device"] = measurement.Device
	}
	// Fields: the raw value under the variable's key, plus optional
	// "<key>_unit" / "<key>_status" metadata fields. The Variable member
	// would duplicate the key itself and is deliberately not stored.
	fields := make(map[string]interface{})
	for key, varType := range measurement.Values {
		fields[key] = varType.Value
		if varType.Unit != "" {
			fields[key+"_unit"] = varType.Unit
		}
		if varType.Status != "" {
			fields[key+"_status"] = varType.Status
		}
	}
	// Attributes are flattened into string fields.
	for key, value := range measurement.Attributes {
		if strValue, ok := value.(string); ok {
			fields[key] = strValue
		} else {
			fields[key] = fmt.Sprintf("%v", value)
		}
	}
	// InfluxDB rejects points without any field; treat that as a lost message.
	if len(fields) == 0 {
		log.Printf("No fields to store in measurement, skipping")
		counter.F("Stored")
		return
	}
	pt, err := influxdb.NewPoint(
		measurement.Application,
		tags,
		fields,
		measurement.Time,
	)
	if err != nil {
		log.Printf("Unable to create point: %s", err)
		counter.F("Stored")
		return
	}
	bp.AddPoint(pt)
	err = self.client.Write(bp)
	if err != nil {
		log.Printf("Unable to write to InfluxDB, measurement lost, error: %s", err)
		counter.F("Stored")
		return
	}
	log.Println("Successfully stored measurement")
	counter.S("Stored")
}
func (self *DatabaseHandle) GetDeviceByLabelAndApplication(applicationLabel string, deviceLabel string) (*Device, error) {
if ! self.initialized {
err := fmt.Errorf("Database connection not initialized")
return nil, err
}
var device Device
result := self.dbh.
Preload("Application").
Preload("DeviceType").
Joins("JOIN applications ON devices.application_id = applications.id").
Where("devices.label = ? AND applications.label = ?", deviceLabel, applicationLabel).
First(&device)
if result.Error != nil {
err := fmt.Errorf("Query failed: %s", result.Error)
return nil, err
}
return &device, nil
// Close shuts down the InfluxDB client, but only if the handle was
// successfully initialized and actually holds a client.
func (self *DatabaseHandle) Close() {
	if !self.initialized || self.client == nil {
		return
	}
	self.client.Close()
	log.Println("InfluxDB connection closed")
}
// GetDeviceByLabel looks up a single device by its label and eagerly loads
// the associated Application and DeviceType records. It returns an error
// when the handle is not initialized or when the query fails (which
// includes gorm's "record not found" from First).
// NOTE(review): this still goes through the gorm handle (dbh); with the
// switch to InfluxDB it is presumably obsolete — confirm remaining callers.
func (self *DatabaseHandle) GetDeviceByLabel(deviceLabel string) (*Device, error) {
	if ! self.initialized {
		err := fmt.Errorf("Database connection not initialized")
		return nil, err
	}
	var device Device
	result := self.dbh.
		Preload("Application").
		Preload("DeviceType").
		Where("devices.label = ?", deviceLabel).
		First(&device)
	if result.Error != nil {
		err := fmt.Errorf("Query failed: %s", result.Error)
		return nil, err
	}
	return &device, nil
}

View File

@@ -1,55 +0,0 @@
package database
import (
"log"
//"time"
"gorm.io/driver/postgres"
"gorm.io/gorm"
)
// Migrate opens a database connection and auto-migrates the schema for all
// model types. The empty DSN makes the gorm postgres driver fall back to
// the well-known PG* environment variables (see NewDatabaseHandle). The
// process terminates if the connection cannot be opened.
// NOTE(review): AutoMigrate errors are ignored here — presumably acceptable
// for a one-shot migration tool, but worth confirming.
func Migrate() {
	// empty DSN: configuration is injected via the PG* environment variables
	dsn := ""
	db, err := gorm.Open(postgres.Open(dsn))
	if err != nil {
		log.Fatalf("Unable to open database connection: %s", err)
	}
	db.AutoMigrate(&Application{})
	log.Println("Application created")
	db.AutoMigrate(&DeviceType{})
	log.Println("DeviceType created")
	db.AutoMigrate(&Device{})
	log.Println("Device created")
	db.AutoMigrate(&Measurement{})
	log.Println("Measurement created")
	// Turning measurements into a timescaledb hypertable must be done manually.
	log.Println("Remember to call create_hypertable on measurements, sowhat I can't do that for you.")
	// Retained example code for seeding test measurements:
	/*
	m := Measurement {
	Time: time.Now(),
	Application: "app",
	Attributes: nil,
	Values: []SensorType {
	{ Variable: "Temperature", Unit: "Degree Celsius", Value: 1.0 },
	{ Variable: "Temperature", Unit: "Degree Celsius", Value: 3.0 },
	},
	}
	db.Create(&m)
	m = Measurement {
	Time: time.Now(),
	Application: "app",
	Attributes: nil,
	Values: []SensorType {
	{ Variable: "Temperature", Unit: "Degree Celsius", Value: 10.0 },
	{ Variable: "Temperature", Unit: "Degree Celsius", Value: 30.0 },
	},
	}
	db.Create(&m)
	*/
}

View File

@@ -1,41 +0,0 @@
-- Legacy hand-written schema (superseded by the gorm models): applications,
-- sensor types, sensors, and a timescaledb hypertable for measurements.
create extension if not exists timescaledb;
-- One row per logical application feeding data into the system.
create table application_t (
id serial not null primary key,
label text not null unique,
attributes jsonb
);
-- Sensor type catalog; 'converter' names how raw values are transformed.
create table sensor_type_t (
id serial not null primary key,
label text not null unique,
variable text not null,
unit text not null,
converter text not null,
attributes jsonb
);
-- Concrete sensors; a label is unique only within its application.
create table sensor_t (
id serial not null primary key,
label text not null,
application int references application_t(id),
sensor_type int references sensor_type_t(id),
attributes jsonb,
unique (label, application)
);
-- Denormalized measurement rows (text labels instead of foreign keys,
-- presumably to keep inserts independent of the catalog tables — confirm).
create table measurement_t (
time timestamp without time zone not null,
application text not null,
sensor_type text not null,
sensor text not null,
variable text not null,
unit text not null,
value double precision not null,
attributes jsonb
);
-- Partition measurement_t by time via timescaledb.
select create_hypertable('measurement_t', 'time');

View File

@@ -1,149 +1,154 @@
package dispatcher
import "log"
import "time"
import "os"
import "fmt"
import "net/url"
import "udi/mqtt"
import "udi/config"
import "udi/counter"
import "udi/handlers/handler"
import "udi/handlers/ttn"
import "udi/handlers/iot"
import "udi/handlers/pv"
import "udi/handlers/mbgw3"
import "udi/handlers/sver"
import "udi/handlers/svej"
import "udi/handlers/dt1t"
import "udi/handlers/locative"
import "udi/handlers/snmp"
import "udi/handlers/z2m"
import (
"fmt"
"log"
"net/url"
"os"
"time"
"udi/config"
"udi/counter"
"udi/handlers/car"
"udi/handlers/dt1t"
"udi/handlers/handler"
"udi/handlers/iot"
"udi/handlers/locative"
"udi/handlers/mbgw3"
"udi/handlers/prepared"
"udi/handlers/pv"
"udi/handlers/svej"
"udi/handlers/sver"
// "udi/handlers/ttn"
"udi/handlers/z2m"
"udi/mqtt"
)
var handlerMap map[string]handler.Handler = make(map[string]handler.Handler)
var archiverChannel chan handler.MessageT = make(chan handler.MessageT, 100)
func InitDispatcher() {
log.Printf("Dispatcher initializing")
go archiver()
log.Printf("Dispatcher initializing")
go archiver()
for _, mapping := range config.Config.TopicMappings {
// log.Printf("Trying to initialize %s", mapping)
for _, mapping := range config.Config.TopicMappings {
// log.Printf("Trying to initialize %s", mapping)
var factory interface{}
switch mapping.Handler {
case "TTN":
factory = ttn.New
case "IoT":
factory = iot.New
case "PV":
factory = pv.New
case "MBGW3":
factory = mbgw3.New
case "SVER":
factory = sver.New
case "SVEJ":
factory = svej.New
case "DT1T":
factory = dt1t.New
case "Locative":
factory = locative.New
case "SNMP":
factory = snmp.New
case "Z2M":
factory = z2m.New
default:
factory = nil
log.Printf("No handler %s found, ignore mapping", mapping.Handler)
}
var factory interface{}
switch mapping.Handler {
// case "TTN":
// factory = ttn.New
case "IoT":
factory = iot.New
case "PV":
factory = pv.New
case "MBGW3":
factory = mbgw3.New
case "SVER":
factory = sver.New
case "SVEJ":
factory = svej.New
case "DT1T":
factory = dt1t.New
case "Locative":
factory = locative.New
case "PREP":
factory = prepared.New
case "Z2M":
factory = z2m.New
case "Car":
factory = car.New
default:
factory = nil
log.Printf("No handler %s found, ignore mapping", mapping.Handler)
}
fn, ok := factory.(func(string, config.HandlerConfigT) handler.Handler)
if ! ok {
log.Println("Typ Assertion failed")
break
}
handler := fn(mapping.Id, mapping.Config)
handlerMap[mapping.Id] = handler
}
fn, ok := factory.(func(string, config.HandlerConfigT) handler.Handler)
if !ok {
log.Println("Typ Assertion failed")
break
}
handler := fn(mapping.Id, mapping.Config)
handlerMap[mapping.Id] = handler
}
//log.Printf("handlerMap: %s", handlerMap)
//log.Printf("handlerMap: %s", handlerMap)
}
// storeMessage appends one archived MQTT message to filename, creating the
// file on first use. Failures are logged and recorded on the "Archived"
// failure counter but never abort the archiver. (The duplicated body left
// over from an unmarked diff has been collapsed into a single one, and the
// redundant string(...) conversion of archivingString was dropped.)
func storeMessage(filename string, item handler.MessageT) {
	file, err := os.OpenFile(filename, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
	if err != nil {
		log.Printf("Unable to open archiving file %s, message is not archived: %s", filename, err)
		counter.F("Archived")
		return
	}
	defer file.Close()
	// NOTE(review): the format string already ends in "\n" and WriteString
	// appends another one, so records are separated by a blank line —
	// presumably intentional; kept as-is to preserve the archive format.
	archivingString := fmt.Sprintf("%s - %s - %s\n", item.Timestamp.Format("2006-01-02 15:04:05"), item.Topic, item.Payload)
	_, err = file.WriteString(archivingString + "\n")
	if err != nil {
		log.Printf("Unable to write message, message is not archived: %s", err)
		counter.F("Archived")
		return
	}
	//log.Println("Successfully archived message")
	counter.S("Archived")
}
func archiver() {
archivingRootDir := config.Config.Archiver.Dir
var lastArchivingDir string
archivingRootDir := config.Config.Archiver.Dir
var lastArchivingDir string
for {
select {
case message := <- archiverChannel:
currentDateStr := message.Timestamp.Format("2006/01/02/15")
currentArchivingDir := archivingRootDir + "/" + currentDateStr
if currentArchivingDir != lastArchivingDir {
err := os.MkdirAll(currentArchivingDir, 0755)
if err != nil {
log.Printf("Unable to create archiving dir %s: %s", currentArchivingDir, err)
counter.F("Archived")
}
lastArchivingDir = currentArchivingDir
//log.Printf("Archiving dir %s created", currentArchivingDir)
}
archivingFilename := fmt.Sprintf("%s/%s", currentArchivingDir, url.PathEscape(message.Topic))
storeMessage(archivingFilename, message)
}
}
for {
select {
case message := <-archiverChannel:
currentDateStr := message.Timestamp.Format("2006/01/02/15")
currentArchivingDir := archivingRootDir + "/" + currentDateStr
if currentArchivingDir != lastArchivingDir {
err := os.MkdirAll(currentArchivingDir, 0755)
if err != nil {
log.Printf("Unable to create archiving dir %s: %s", currentArchivingDir, err)
counter.F("Archived")
}
lastArchivingDir = currentArchivingDir
//log.Printf("Archiving dir %s created", currentArchivingDir)
}
archivingFilename := fmt.Sprintf("%s/%s", currentArchivingDir, url.PathEscape(message.Topic))
storeMessage(archivingFilename, message)
}
}
}
func InputDispatcher() {
for {
select {
case mqttMessage := <- mqtt.InputChannel:
//log.Printf("Message arrived in inputDispatcher, topic: %s\n", mqttMessage.Topic)
message := handler.MessageT { time.Now(), mqttMessage.Topic, string(mqttMessage.Payload) }
archiverChannel <- message
handleMessage(message)
}
}
for {
select {
case mqttMessage := <-mqtt.InputChannel:
//log.Printf("Message arrived in inputDispatcher, topic: %s\n", mqttMessage.Topic)
message := handler.MessageT{time.Now(), mqttMessage.Topic, string(mqttMessage.Payload)}
archiverChannel <- message
handleMessage(message)
}
}
}
func handleMessage(message handler.MessageT) {
for _, mapping := range config.Config.TopicMappings {
// log.Printf("Testing %s -> %s", mapping.Topics, mapping.Handler)
for _, subscribedTopic := range mapping.Topics {
// log.Printf("Testing %s in %s", message.Topic, subscribedTopic)
if mqtt.TopicMatchesSubscription(message.Topic, subscribedTopic) {
//log.Printf("Handle message in handler %s", mapping.Id)
handler, exists := handlerMap[mapping.Id]
if exists {
handler.Handle(message)
counter.S("Dispatched")
return
} else {
log.Printf("Handler %s not found, message %s is lost", mapping.Id, message)
counter.F("Dispatched")
}
}
}
}
log.Printf("No match for topic %s, message %s is lost", message.Topic, message)
counter.F("Dispatched")
for _, mapping := range config.Config.TopicMappings {
// log.Printf("Testing %s -> %s", mapping.Topics, mapping.Handler)
for _, subscribedTopic := range mapping.Topics {
// log.Printf("Testing %s in %s", message.Topic, subscribedTopic)
if mqtt.TopicMatchesSubscription(message.Topic, subscribedTopic) {
//log.Printf("Handle message in handler %s", mapping.Id)
handler, exists := handlerMap[mapping.Id]
if exists {
handler.Handle(message)
counter.S("Dispatched")
return
} else {
log.Printf("Handler %s not found, message %s is lost", mapping.Id, message)
counter.F("Dispatched")
}
}
}
}
log.Printf("No match for topic %s, message %s is lost", message.Topic, message)
counter.F("Dispatched")
}

View File

@@ -3,23 +3,14 @@ module udi
go 1.22.3
require (
github.com/eclipse/paho.mqtt.golang v1.4.3
github.com/google/uuid v1.4.0
github.com/eclipse/paho.mqtt.golang v1.5.0
github.com/google/uuid v1.6.0
github.com/influxdata/influxdb1-client v0.0.0-20220302092344-a9ab5670611c
github.com/oliveagle/jsonpath v0.0.0-20180606110733-2e52cf6e6852
gorm.io/driver/postgres v1.5.9
gorm.io/gorm v1.25.11
)
require (
github.com/gorilla/websocket v1.5.0 // indirect
github.com/jackc/pgpassfile v1.0.0 // indirect
github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a // indirect
github.com/jackc/pgx/v5 v5.5.5 // indirect
github.com/jackc/puddle/v2 v2.2.1 // indirect
github.com/jinzhu/inflection v1.0.0 // indirect
github.com/jinzhu/now v1.1.5 // indirect
golang.org/x/crypto v0.25.0 // indirect
golang.org/x/net v0.27.0 // indirect
golang.org/x/sync v0.7.0 // indirect
golang.org/x/text v0.16.0 // indirect
github.com/gorilla/websocket v1.5.3 // indirect
golang.org/x/net v0.34.0 // indirect
golang.org/x/sync v0.10.0 // indirect
)

View File

@@ -1,46 +1,14 @@
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/eclipse/paho.mqtt.golang v1.4.3 h1:2kwcUGn8seMUfWndX0hGbvH8r7crgcJguQNCyp70xik=
github.com/eclipse/paho.mqtt.golang v1.4.3/go.mod h1:CSYvoAlsMkhYOXh/oKyxa8EcBci6dVkLCbo5tTC1RIE=
github.com/google/uuid v1.4.0 h1:MtMxsa51/r9yyhkyLsVeVt0B+BGQZzpQiTQ4eHZ8bc4=
github.com/google/uuid v1.4.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/gorilla/websocket v1.5.0 h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWmnc=
github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM=
github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg=
github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a h1:bbPeKD0xmW/Y25WS6cokEszi5g+S0QxI/d45PkRi7Nk=
github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM=
github.com/jackc/pgx/v5 v5.5.5 h1:amBjrZVmksIdNjxGW/IiIMzxMKZFelXbUoPNb+8sjQw=
github.com/jackc/pgx/v5 v5.5.5/go.mod h1:ez9gk+OAat140fv9ErkZDYFWmXLfV+++K0uAOiwgm1A=
github.com/jackc/puddle/v2 v2.2.1 h1:RhxXJtFG022u4ibrCSMSiu5aOq1i77R3OHKNJj77OAk=
github.com/jackc/puddle/v2 v2.2.1/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4=
github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E=
github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc=
github.com/jinzhu/now v1.1.5 h1:/o9tlHleP7gOFmsnYNz3RGnqzefHA47wQpKrrdTIwXQ=
github.com/jinzhu/now v1.1.5/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8=
github.com/eclipse/paho.mqtt.golang v1.5.0 h1:EH+bUVJNgttidWFkLLVKaQPGmkTUfQQqjOsyvMGvD6o=
github.com/eclipse/paho.mqtt.golang v1.5.0/go.mod h1:du/2qNQVqJf/Sqs4MEL77kR8QTqANF7XU7Fk0aOTAgk=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg=
github.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
github.com/influxdata/influxdb1-client v0.0.0-20220302092344-a9ab5670611c h1:qSHzRbhzK8RdXOsAdfDgO49TtqC1oZ+acxPrkfTxcCs=
github.com/influxdata/influxdb1-client v0.0.0-20220302092344-a9ab5670611c/go.mod h1:qj24IKcXYK6Iy9ceXlo3Tc+vtHo9lIhSX5JddghvEPo=
github.com/oliveagle/jsonpath v0.0.0-20180606110733-2e52cf6e6852 h1:Yl0tPBa8QPjGmesFh1D0rDy+q1Twx6FyU7VWHi8wZbI=
github.com/oliveagle/jsonpath v0.0.0-20180606110733-2e52cf6e6852/go.mod h1:eqOVx5Vwu4gd2mmMZvVZsgIqNSaW3xxRThUJ0k/TPk4=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.1 h1:w7B6lhMri9wdJUVmEZPGGhZzrYTPvgJArz7wNPgYKsk=
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
golang.org/x/crypto v0.25.0 h1:ypSNr+bnYL2YhwoMt2zPxHFmbAN1KZs/njMG3hxUp30=
golang.org/x/crypto v0.25.0/go.mod h1:T+wALwcMOSE0kXgUAnPAHqTLW+XHgcELELW8VaDgm/M=
golang.org/x/net v0.27.0 h1:5K3Njcw06/l2y9vpGCSdcxWOYHOUk3dVNGDXN+FvAys=
golang.org/x/net v0.27.0/go.mod h1:dDi0PyhWNoiUOrAS8uXv/vnScO4wnHQO4mj9fn/RytE=
golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M=
golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/text v0.16.0 h1:a94ExnEXNtEwYLGJSIUxnWoxoRz/ZcCsV63ROupILh4=
golang.org/x/text v0.16.0/go.mod h1:GhwF1Be+LQoKShO3cGOHzqOgRrGaYc9AvblQOmPVHnI=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gorm.io/driver/postgres v1.5.9 h1:DkegyItji119OlcaLjqN11kHoUgZ/j13E0jkJZgD6A8=
gorm.io/driver/postgres v1.5.9/go.mod h1:DX3GReXH+3FPWGrrgffdvCk3DQ1dwDPdmbenSkweRGI=
gorm.io/gorm v1.25.11 h1:/Wfyg1B/je1hnDx3sMkX+gAlxrlZpn6X0BXRlwXlvHg=
gorm.io/gorm v1.25.11/go.mod h1:xh7N7RHfYlNc5EmcI/El95gXusucDrQnHXe0+CgWcLQ=
golang.org/x/net v0.34.0 h1:Mb7Mrk043xzHgnRM88suvJFwzVrRfHEHJEl5/71CKw0=
golang.org/x/net v0.34.0/go.mod h1:di0qlW3YNM5oh6GqDGQr92MyTozJPmybPK4Ev/Gm31k=
golang.org/x/sync v0.10.0 h1:3NQrjDixjgGwUOCaF8w2+VYHv0Ve/vGYSbdkTa98gmQ=
golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=

View File

@@ -0,0 +1,94 @@
package car
import (
"encoding/json"
"log"
"reflect"
"time"
"udi/config"
"udi/database"
"udi/handlers/handler"
)
// CarHandler consumes JSON messages from the Car powermeter topic and
// persists them as measurements via its own database handle.
type CarHandler struct {
	handler.CommonHandler
	// dbh is created in New and used by Handle to store measurements.
	dbh *database.DatabaseHandle
}
/*
{
"status": "Ok",
"timestamp": "2025-12-15T13:11:15.648243",
"voltageL1": 228.68,
"voltageL2": 227.69,
"voltageL3": 228.53,
"currentL1": 0.0,
"currentL2": 0.0,
"currentL3": 0.0,
"powerL1": 0.0,
"powerL2": 0.0,
"powerL3": 0.0,
"totalImportEnergy": 0.0,
"totalExportEnergy": 0.0,
"cnt": 399300}
*/
// CarValue mirrors the JSON payload published by the Car powermeter (see
// the sample payload in the comment above). The custom `unit` tag is read
// via reflection in Handle to annotate each stored variable with its unit.
type CarValue struct {
	Status            string  `unit:"" json:"status"`
	Timestamp         string  `unit:"" json:"timestamp"`
	VoltageL1         float32 `unit:"V" json:"voltageL1"`
	VoltageL2         float32 `unit:"V" json:"voltageL2"`
	VoltageL3         float32 `unit:"V" json:"voltageL3"`
	CurrentL1         float32 `unit:"A" json:"currentL1"`
	CurrentL2         float32 `unit:"A" json:"currentL2"`
	CurrentL3         float32 `unit:"A" json:"currentL3"`
	PowerL1           float32 `unit:"W" json:"powerL1"`
	PowerL2           float32 `unit:"W" json:"powerL2"`
	PowerL3           float32 `unit:"W" json:"powerL3"`
	TotalImportEnergy float32 `unit:"Wh" json:"totalImportEnergy"`
	TotalExportEnergy float32 `unit:"Wh" json:"totalExportEnergy"`
	Cnt               int     `unit:"" json:"cnt"`
}
// New creates a Car handler with its own database handle. The config
// argument is accepted for signature compatibility with the other handler
// factories but is not used here.
func New(id string, config config.HandlerConfigT) handler.Handler {
	t := &CarHandler{}
	t.Id = id
	t.dbh = database.NewDatabaseHandle()
	// id is a string, so %s is the correct verb (was %d, which go vet flags
	// and which would print "%!d(string=...)").
	log.Printf("Handler Car %s initialized", id)
	return t
}
// Handle decodes a Car powermeter JSON payload and stores every field of
// the CarValue struct as one variable of a single "Car" measurement, with
// the unit taken from the field's `unit` struct tag.
func (self *CarHandler) Handle(message handler.MessageT) {
	//log.Printf("Handler Car %d processing %s -> %s", self.id, message.Topic, message.Payload)
	var carValue CarValue
	if err := json.Unmarshal([]byte(message.Payload), &carValue); err != nil {
		self.Lost("Unable to parse payload into carValue struct", err, message)
		return
	}
	// Walk the struct via reflection so new fields are picked up without
	// touching this code.
	structVal := reflect.ValueOf(carValue)
	structTyp := structVal.Type()
	variables := make(map[string]database.VariableType)
	for idx := 0; idx < structTyp.NumField(); idx++ {
		fieldInfo := structTyp.Field(idx)
		variables[fieldInfo.Name] = database.VariableType{
			Label:    "",
			Variable: fieldInfo.Name,
			Unit:     fieldInfo.Tag.Get("unit"),
			Value:    structVal.Field(idx).Interface(),
		}
	}
	measurement := database.Measurement{
		Time:        time.Now(),
		Application: "Car",
		Device:      "Powermeter",
		Values:      variables,
	}
	self.dbh.StoreMeasurement(&measurement)
	self.S()
}

View File

@@ -1,80 +1,75 @@
package dt1t
import (
"log"
"fmt"
"time"
"strconv"
"udi/handlers/handler"
"udi/database"
"udi/config"
"log"
"strconv"
"time"
"udi/config"
"udi/database"
"udi/handlers/handler"
)
type Dt1tHandler struct {
handler.CommonHandler
ready bool
label string
dbh *database.DatabaseHandle
application string
device string
handler.CommonHandler
ready bool
label string
dbh *database.DatabaseHandle
application string
device string
}
func New(id string, config config.HandlerConfigT) handler.Handler {
t := &Dt1tHandler {
}
t := &Dt1tHandler{}
if config.Attributes["Application"] == "" {
log.Println("Error: application not configured")
return t
}
t.application = config.Attributes["Application"]
if config.Attributes["Device"] == "" {
log.Println("Error: device not configured")
return t
}
t.device = config.Attributes["Device"]
t.Id = id
t.dbh = database.NewDatabaseHandle()
t.ready = true
return t
if config.Attributes["Application"] == "" {
log.Println("Error: application not configured")
return t
}
t.application = config.Attributes["Application"]
if config.Attributes["Device"] == "" {
log.Println("Error: device not configured")
return t
}
t.device = config.Attributes["Device"]
t.Id = id
t.dbh = database.NewDatabaseHandle()
log.Printf("Handler DT1T %d initialized", id)
t.ready = true
return t
}
func (self *Dt1tHandler) Handle(message handler.MessageT) {
if ! self.ready {
self.Lost("Handler is not marked as ready", nil, message)
return
}
// log.Printf("Handler DT1T %d processing %s -> %s", self.id, message.Topic, message.Payload)
if !self.ready {
self.Lost("Handler is not marked as ready", nil, message)
return
}
// log.Printf("Handler DT1T %d processing %s -> %s", self.id, message.Topic, message.Payload)
temperature, err := strconv.Atoi(message.Payload)
if err != nil {
self.Lost("Invalid raw value", err, message)
return
}
if temperature & 0x8000 != 0{
temperature = ((temperature - 1) ^ 0xffff) * -1
}
temperatureF := float32(temperature) / 10.0
temperature, err := strconv.Atoi(message.Payload)
if err != nil {
self.Lost("Invalid raw value", err, message)
return
}
if temperature&0x8000 != 0 {
temperature = ((temperature - 1) ^ 0xffff) * -1
}
temperatureF := float32(temperature) / 10.0
var measurement database.Measurement
measurement.Time = time.Now()
measurement.Application = self.application
measurement.Device = self.device
var variable database.VariableType
variable.Label = "Temperature"
variable.Variable = ""
variable.Unit = "°C"
variable.Value = fmt.Sprintf("%f", temperatureF)
measurement.Values = make(map[string]database.VariableType)
measurement.Values["Value"] = variable
var measurement database.Measurement
measurement.Time = time.Now()
measurement.Application = self.application
measurement.Device = self.device
// log.Printf("Prepared measurement item: %s", measurement)
self.dbh.StoreMeasurement(&measurement)
self.S()
var variable database.VariableType
variable.Label = "Temperature"
variable.Variable = ""
variable.Unit = "°C"
variable.Value = temperatureF
measurement.Values = make(map[string]database.VariableType)
measurement.Values["Value"] = variable
// log.Printf("Prepared measurement item: %s", measurement)
self.dbh.StoreMeasurement(&measurement)
self.S()
}

View File

@@ -31,6 +31,7 @@ func New(id string, config config.HandlerConfigT) handler.Handler {
}
t.Id = id
t.dbh = database.NewDatabaseHandle()
log.Printf("Handler Locative %d initialized", id)
return t
}

View File

@@ -2,6 +2,7 @@ package mbgw3
import (
"time"
"log"
"strconv"
"encoding/json"
"udi/config"
@@ -31,6 +32,7 @@ func New(id string, config config.HandlerConfigT) handler.Handler {
}
t.Id = id
t.dbh = database.NewDatabaseHandle()
log.Printf("Handler MBGW3 %d initialized", id)
return t
}
@@ -72,12 +74,22 @@ func (self *Mbgw3Handler) Handle(message handler.MessageT) {
measurement.Values = make(map[string]database.VariableType)
unitMap := map[string]string { "Energy": "Wh", "Power": "W", "Voltage": "V", "Current": "A", "Volume": "m3" }
keyCount := make(map[string]int)
for k, v := range observation.Values {
unit, exists := unitMap[k]
if ! exists {
unit = "Unmapped Unit"
}
measurement.Values[k] = database.VariableType {
// Check if key already exists and create unique key if needed
keyCount[k]++
uniqueKey := k
if keyCount[k] > 1 {
uniqueKey = k + strconv.Itoa(keyCount[k])
}
measurement.Values[uniqueKey] = database.VariableType {
Label: "",
Variable: k,
Unit: unit,

View File

@@ -1,4 +1,4 @@
package snmp
package prepared
import (
"time"
@@ -10,7 +10,7 @@ import (
)
type SnmpHandler struct {
type PreparedHandler struct {
handler.CommonHandler
dbh *database.DatabaseHandle
}
@@ -19,6 +19,8 @@ type endpoint_t struct {
Label string `json:"label"`
Variable string `json:"variable"`
Value string `json:"value"`
Unit string `json:"unit"`
Status string `json:"status"`
}
type observation_t struct {
@@ -29,15 +31,16 @@ type observation_t struct {
func New(id string, config config.HandlerConfigT) handler.Handler {
t := &SnmpHandler {
t := &PreparedHandler {
}
t.Id = id
t.dbh = database.NewDatabaseHandle()
log.Printf("Handler Prepared %d initialized", id)
return t
}
func (self *SnmpHandler) Handle(message handler.MessageT) {
// log.Printf("Handler SNMP %d processing %s -> %s", self.Id, message.Topic, message.Payload)
func (self *PreparedHandler) Handle(message handler.MessageT) {
log.Printf("Handler Prepared %d processing %s -> %s", self.Id, message.Topic, message.Payload)
var observation observation_t
err := json.Unmarshal([]byte(message.Payload), &observation)
@@ -49,7 +52,7 @@ func (self *SnmpHandler) Handle(message handler.MessageT) {
var measurement database.Measurement
measurement.Time = time.Now()
measurement.Application = "SNMP"
measurement.Application = self.Id
measurement.Device = observation.Device
measurement.Attributes = map[string]interface{} {
@@ -61,12 +64,13 @@ func (self *SnmpHandler) Handle(message handler.MessageT) {
measurement.Values[k] = database.VariableType {
Label: v.Label,
Variable: v.Variable,
Unit: "",
Unit: v.Unit,
Value: v.Value,
Status: v.Status,
}
}
// log.Printf("Prepared measurement item: %s", measurement)
log.Printf("Prepared measurement item: %s", measurement)
self.dbh.StoreMeasurement(&measurement)
self.S()

View File

@@ -3,6 +3,7 @@ package pv
import (
"reflect"
"time"
"log"
"encoding/json"
"udi/config"
"udi/handlers/handler"
@@ -41,6 +42,7 @@ func New(id string, config config.HandlerConfigT) handler.Handler {
}
t.Id = id
t.dbh = database.NewDatabaseHandle()
log.Printf("Handler PV %d initialized", id)
return t
}

View File

@@ -1,29 +1,30 @@
package svej
import (
"log"
"time"
"strconv"
"strings"
"fmt"
"github.com/oliveagle/jsonpath"
"encoding/json"
"udi/config"
"udi/handlers/handler"
"udi/database"
"encoding/json"
"fmt"
"log"
"strconv"
"strings"
"time"
"udi/config"
"udi/database"
"udi/handlers/handler"
"github.com/oliveagle/jsonpath"
)
type SingleValueExtractorJsonpathHandler struct {
handler.CommonHandler
ready bool
application string
deviceSelector string
valueSelector string
unitSelector string
deviceJsonpath *jsonpath.Compiled
valueJsonpath *jsonpath.Compiled
unitJsonpath *jsonpath.Compiled
dbh *database.DatabaseHandle
handler.CommonHandler
ready bool
application string
deviceSelector string
valueSelector string
unitSelector string
deviceJsonpath *jsonpath.Compiled
valueJsonpath *jsonpath.Compiled
unitJsonpath *jsonpath.Compiled
dbh *database.DatabaseHandle
}
/*
@@ -33,133 +34,131 @@ T:TopicPartIndex
C:ConstantValue
*/
func New(id string, config config.HandlerConfigT) handler.Handler {
t := &SingleValueExtractorJsonpathHandler {
ready: false,
}
t := &SingleValueExtractorJsonpathHandler{
ready: false,
}
if config.Attributes["application"] == "" {
log.Println("Error: application not configured")
return t
}
t.application = config.Attributes["application"]
t.deviceSelector = config.Attributes["deviceSelector"]
if t.deviceSelector[:2] == "J:" {
jp, err := jsonpath.Compile(t.deviceSelector[2:])
if err != nil {
log.Printf("Unable to compile deviceJsonpath: %s, %s", t.deviceSelector[2:], err)
return t
}
t.deviceJsonpath = jp
}
t.valueSelector = config.Attributes["valueSelector"]
if t.valueSelector[:2] == "J:" {
jp, err := jsonpath.Compile(t.valueSelector[2:])
if err != nil {
log.Printf("Unable to compile valueJsonpath: %s, %s", t.valueSelector[2:], err)
return t
}
t.valueJsonpath = jp
}
t.unitSelector = config.Attributes["unitSelector"]
if t.unitSelector[:2] == "J:" {
jp, err := jsonpath.Compile(t.unitSelector[2:])
if err != nil {
log.Printf("Unable to compile unitJsonpath: %s, %s", t.unitSelector[2:], err)
return t
}
t.unitJsonpath = jp
}
if config.Attributes["application"] == "" {
log.Println("Error: application not configured")
return t
}
t.application = config.Attributes["application"]
t.Id = id
t.ready = true
t.dbh = database.NewDatabaseHandle()
return t
t.deviceSelector = config.Attributes["deviceSelector"]
if t.deviceSelector[:2] == "J:" {
jp, err := jsonpath.Compile(t.deviceSelector[2:])
if err != nil {
log.Printf("Unable to compile deviceJsonpath: %s, %s", t.deviceSelector[2:], err)
return t
}
t.deviceJsonpath = jp
}
t.valueSelector = config.Attributes["valueSelector"]
if t.valueSelector[:2] == "J:" {
jp, err := jsonpath.Compile(t.valueSelector[2:])
if err != nil {
log.Printf("Unable to compile valueJsonpath: %s, %s", t.valueSelector[2:], err)
return t
}
t.valueJsonpath = jp
}
t.unitSelector = config.Attributes["unitSelector"]
if t.unitSelector[:2] == "J:" {
jp, err := jsonpath.Compile(t.unitSelector[2:])
if err != nil {
log.Printf("Unable to compile unitJsonpath: %s, %s", t.unitSelector[2:], err)
return t
}
t.unitJsonpath = jp
}
t.Id = id
t.ready = true
t.dbh = database.NewDatabaseHandle()
log.Printf("Handler SVEJ %s initialized", id)
return t
}
func (self *SingleValueExtractorJsonpathHandler) ExtractionHelper(subTopics []string, jPayload interface{}, selector string, jp *jsonpath.Compiled) (string, error) {
var res string
switch selector[:2] {
case "J:":
// extract using jsonpath from payload
r, e := jp.Lookup(jPayload)
if e != nil {
return "", fmt.Errorf("jp.Lookup failed with %s", e)
}
res = fmt.Sprint(r)
case "T:":
// T: extract from topic
i, e := strconv.Atoi(selector[2:])
if e != nil {
return "", fmt.Errorf("Atoi failed with %s", e)
}
if i >= len(subTopics) {
return "", fmt.Errorf("not enough subtopics")
}
res = subTopics[i]
case "C:":
// use constant value
res = selector[2:]
default:
return "", fmt.Errorf("Invalid selector: %s", selector[:2])
}
return res, nil
// ExtractionHelper resolves one value according to the configured selector.
// Supported selector prefixes:
//
//	J: — apply the pre-compiled jsonpath jp to the unmarshalled payload
//	T: — take the subtopic at the given index from the split topic
//	C: — use the constant text following the prefix
//
// It returns the extracted value (its dynamic type depends on the source:
// jsonpath results keep their JSON type, topic parts and constants are
// strings) or an error describing why extraction failed.
func (self *SingleValueExtractorJsonpathHandler) ExtractionHelper(subTopics []string, jPayload interface{}, selector string, jp *jsonpath.Compiled) (interface{}, error) {
	// Guard: every selector must carry at least a two-character prefix,
	// otherwise selector[:2] below would panic.
	if len(selector) < 2 {
		return nil, fmt.Errorf("selector too short: %q", selector)
	}
	var res interface{}
	switch selector[:2] {
	case "J:":
		// extract using jsonpath from payload
		r, e := jp.Lookup(jPayload)
		if e != nil {
			return nil, fmt.Errorf("jp.Lookup failed with %s", e)
		}
		res = r
	case "T:":
		// T: extract from topic
		i, e := strconv.Atoi(selector[2:])
		if e != nil {
			return nil, fmt.Errorf("Atoi failed with %s", e)
		}
		// reject negative indices too; they would panic on subTopics[i]
		if i < 0 || i >= len(subTopics) {
			return nil, fmt.Errorf("not enough subtopics")
		}
		res = subTopics[i]
	case "C:":
		// use constant value
		res = selector[2:]
	default:
		return nil, fmt.Errorf("Invalid selector: %s", selector[:2])
	}
	return res, nil
}
func (self *SingleValueExtractorJsonpathHandler) Handle(message handler.MessageT) {
if ! self.ready {
self.Lost("Handler is not marked as ready", nil, message)
return
}
log.Printf("Handler SingleValueExtractorJsonpath %d processing %s -> %s", self.Id, message.Topic, message.Payload)
if !self.ready {
self.Lost("Handler is not marked as ready", nil, message)
return
}
log.Printf("Handler SingleValueExtractorJsonpath %d processing %s -> %s", self.Id, message.Topic, message.Payload)
var measurement database.Measurement
measurement.Time = time.Now()
measurement.Application = self.application
subTopics := strings.Split(message.Topic, "/")
log.Printf("Subtopics: %s", strings.Join(subTopics, ", "))
var jPayload interface{}
err := json.Unmarshal([]byte(message.Payload), &jPayload)
if err != nil {
self.Lost("Unable to unmarshal payload", err, message)
return
}
var measurement database.Measurement
measurement.Time = time.Now()
measurement.Application = self.application
device, err1 := self.ExtractionHelper(subTopics, jPayload, self.deviceSelector, self.deviceJsonpath)
if err1 != nil {
self.Lost("Device extraction failed", err1, message)
return
}
log.Printf("device: %s", device)
subTopics := strings.Split(message.Topic, "/")
log.Printf("Subtopics: %s", strings.Join(subTopics, ", "))
var jPayload interface{}
err := json.Unmarshal([]byte(message.Payload), &jPayload)
if err != nil {
self.Lost("Unable to unmarshal payload", err, message)
return
}
value, err2 := self.ExtractionHelper(subTopics, jPayload, self.valueSelector, self.valueJsonpath)
if err2 != nil {
self.Lost("Value extraction failed", err2, message)
return
}
device, err1 := self.ExtractionHelper(subTopics, jPayload, self.deviceSelector, self.deviceJsonpath)
if err1 != nil {
self.Lost("Device extraction failed", err1, message)
return
}
log.Printf("device: %s", device)
unit, err3 := self.ExtractionHelper(subTopics, jPayload, self.unitSelector, self.unitJsonpath)
if err3 != nil {
self.Lost("Unit extraction failed", err3, message)
return
}
value, err2 := self.ExtractionHelper(subTopics, jPayload, self.valueSelector, self.valueJsonpath)
if err2 != nil {
self.Lost("Value extraction failed", err2, message)
return
}
measurement.Device = device
unit, err3 := self.ExtractionHelper(subTopics, jPayload, self.unitSelector, self.unitJsonpath)
if err3 != nil {
self.Lost("Unit extraction failed", err3, message)
return
}
var variable database.VariableType
variable.Label = ""
variable.Variable = ""
variable.Unit = unit
variable.Value = value
measurement.Values = make(map[string]database.VariableType)
measurement.Values["Value"] = variable
measurement.Device = device.(string)
log.Printf("Prepared measurement item: %s", measurement)
self.dbh.StoreMeasurement(&measurement)
self.S()
var variable database.VariableType
variable.Label = ""
variable.Variable = ""
variable.Unit = unit.(string)
variable.Value = value
measurement.Values = make(map[string]database.VariableType)
measurement.Values["Value"] = variable
log.Printf("Prepared measurement item: %s", measurement)
self.dbh.StoreMeasurement(&measurement)
self.S()
}

View File

@@ -1,23 +1,22 @@
package sver
import (
"time"
"strconv"
"strings"
"regexp"
"log"
"udi/config"
"udi/handlers/handler"
"udi/database"
"log"
"regexp"
"strconv"
"strings"
"time"
"udi/config"
"udi/database"
"udi/handlers/handler"
)
type SingleValueExtractorRegexHandler struct {
handler.CommonHandler
ready bool
config localConfig
payloadRegex *regexp.Regexp
dbh *database.DatabaseHandle
handler.CommonHandler
ready bool
config localConfig
payloadRegex *regexp.Regexp
dbh *database.DatabaseHandle
}
const TOPIC_SEL = "topic"
@@ -26,174 +25,191 @@ const PAYLOAD_FULL_SEL = "payload-full"
const CONSTANT_SEL = "constant"
type localConfig struct {
application string
deviceFrom string
devicePart int
device string
valueFrom string
valuePart int
unitFrom string
unitPart int
unit string
application string
deviceFrom string
devicePart int
device string
valueFrom string
valuePart int
valueType string
unitFrom string
unitPart int
unit string
}
func New(id string, config config.HandlerConfigT) handler.Handler {
t := &SingleValueExtractorRegexHandler {
ready: false,
}
t := &SingleValueExtractorRegexHandler{
ready: false,
}
var localConfig localConfig
if config.Attributes["application"] == "" {
log.Println("Error: application not configured")
return t
}
localConfig.application = config.Attributes["application"]
payloadRegex := config.Attributes["payloadRegex"]
if payloadRegex != "" {
t.payloadRegex = regexp.MustCompile(payloadRegex)
} else {
t.payloadRegex = nil
}
var localConfig localConfig
if config.Attributes["application"] == "" {
log.Println("Error: application not configured")
return t
}
localConfig.application = config.Attributes["application"]
if config.Attributes["deviceFrom"] != TOPIC_SEL && config.Attributes["deviceFrom"] != PAYLOAD_SEL && config.Attributes["deviceFrom"] != CONSTANT_SEL {
log.Printf("Error: invalid value %s for deviceFrom", config.Attributes["deviceFrom"])
return t
}
localConfig.deviceFrom = config.Attributes["deviceFrom"]
payloadRegex := config.Attributes["payloadRegex"]
if payloadRegex != "" {
t.payloadRegex = regexp.MustCompile(payloadRegex)
} else {
t.payloadRegex = nil
}
devicePart, err1 := strconv.Atoi(config.Attributes["devicePart"])
if err1 != nil {
log.Printf("Error: unable to convert devicePart to number: %s", err1)
return t
}
localConfig.devicePart = devicePart
if config.Attributes["deviceFrom"] != TOPIC_SEL && config.Attributes["deviceFrom"] != PAYLOAD_SEL && config.Attributes["deviceFrom"] != CONSTANT_SEL {
log.Printf("Error: invalid value %s for deviceFrom", config.Attributes["deviceFrom"])
return t
}
localConfig.deviceFrom = config.Attributes["deviceFrom"]
// empty device is valid
localConfig.device = config.Attributes["device"]
devicePart, err1 := strconv.Atoi(config.Attributes["devicePart"])
if err1 != nil {
log.Printf("Error: unable to convert devicePart to number: %s", err1)
return t
}
localConfig.devicePart = devicePart
if config.Attributes["valueFrom"] != PAYLOAD_SEL && config.Attributes["valueFrom"] != PAYLOAD_FULL_SEL {
log.Printf("Error: invalid value %s for valueFrom", config.Attributes["valueFrom"])
return t
}
localConfig.valueFrom = config.Attributes["valueFrom"]
// empty device is valid
localConfig.device = config.Attributes["device"]
if config.Attributes["valueFrom"] == PAYLOAD_SEL {
valuePart, err2 := strconv.Atoi(config.Attributes["valuePart"])
if err2 != nil {
log.Printf("Error: unable to convert valuePart to number: %s", err2)
return t
}
localConfig.valuePart = valuePart
}
if config.Attributes["valueFrom"] != PAYLOAD_SEL && config.Attributes["valueFrom"] != PAYLOAD_FULL_SEL {
log.Printf("Error: invalid value %s for valueFrom", config.Attributes["valueFrom"])
return t
}
localConfig.valueFrom = config.Attributes["valueFrom"]
if config.Attributes["unitFrom"] != PAYLOAD_SEL && config.Attributes["unitFrom"] != CONSTANT_SEL {
log.Printf("Error: invalid value %s for unitFrom", config.Attributes["unitFrom"])
return t
}
localConfig.unitFrom = config.Attributes["unitFrom"]
if config.Attributes["valueFrom"] == PAYLOAD_SEL {
valuePart, err2 := strconv.Atoi(config.Attributes["valuePart"])
if err2 != nil {
log.Printf("Error: unable to convert valuePart to number: %s", err2)
return t
}
localConfig.valuePart = valuePart
}
if config.Attributes["unitFrom"] == PAYLOAD_SEL {
unitPart, err3 := strconv.Atoi(config.Attributes["unitPart"])
if err3 != nil {
log.Printf("Error: unable to convert unitPart to number: %s", err3)
return t
}
localConfig.unitPart = unitPart
}
if config.Attributes["valueType"] != "float" && config.Attributes["valueType"] != "string" {
log.Printf("Error: invalid value %s for valueType", config.Attributes["valueType"])
return t
}
localConfig.valueType = config.Attributes["valueType"]
// empty unit is valid
localConfig.unit = config.Attributes["unit"]
if config.Attributes["unitFrom"] != PAYLOAD_SEL && config.Attributes["unitFrom"] != CONSTANT_SEL {
log.Printf("Error: invalid value %s for unitFrom", config.Attributes["unitFrom"])
return t
}
localConfig.unitFrom = config.Attributes["unitFrom"]
t.config = localConfig
if config.Attributes["unitFrom"] == PAYLOAD_SEL {
unitPart, err3 := strconv.Atoi(config.Attributes["unitPart"])
if err3 != nil {
log.Printf("Error: unable to convert unitPart to number: %s", err3)
return t
}
localConfig.unitPart = unitPart
}
t.Id = id
t.ready = true
t.dbh = database.NewDatabaseHandle()
return t
// empty unit is valid
localConfig.unit = config.Attributes["unit"]
t.config = localConfig
t.Id = id
t.ready = true
t.dbh = database.NewDatabaseHandle()
log.Printf("Handler SVER %d initialized", id)
return t
}
func (self *SingleValueExtractorRegexHandler) Handle(message handler.MessageT) {
if ! self.ready {
self.Lost("Handler is not marked as ready", nil, message)
return
}
//log.Printf("Handler SingleValueExtractor %d processing %s -> %s", self.id, message.Topic, message.Payload)
if !self.ready {
self.Lost("Handler is not marked as ready", nil, message)
return
}
//log.Printf("Handler SingleValueExtractor %d processing %s -> %s", self.id, message.Topic, message.Payload)
var measurement database.Measurement
measurement.Time = time.Now()
measurement.Application = self.config.application
subTopics := strings.Split(message.Topic, "/")
//log.Printf("Subtopics: %s", strings.Join(subTopics, ", "))
var measurement database.Measurement
measurement.Time = time.Now()
measurement.Application = self.config.application
var payloadMatches []string
if self.payloadRegex != nil {
payloadMatches = self.payloadRegex.FindStringSubmatch(message.Payload)
//log.Printf("Matches: %s", strings.Join(payloadMatches, ", "))
}
subTopics := strings.Split(message.Topic, "/")
//log.Printf("Subtopics: %s", strings.Join(subTopics, ", "))
switch self.config.deviceFrom {
case TOPIC_SEL:
if self.config.devicePart >= len(subTopics) {
self.Lost("devicePart out of range", nil, message)
return
}
measurement.Device = subTopics[self.config.devicePart]
case PAYLOAD_SEL:
if self.payloadRegex == nil {
self.Lost("no payloadRegex defined, devicePart can't be used", nil, message)
return
}
if self.config.devicePart >= len(payloadMatches) {
self.Lost("devicePart out of range", nil, message)
return
}
measurement.Device = payloadMatches[self.config.devicePart]
case CONSTANT_SEL:
measurement.Device = self.config.device
}
var payloadMatches []string
if self.payloadRegex != nil {
payloadMatches = self.payloadRegex.FindStringSubmatch(message.Payload)
//log.Printf("Matches: %s", strings.Join(payloadMatches, ", "))
}
measurement.Values = make(map[string]database.VariableType)
var variable database.VariableType
variable.Label = ""
variable.Variable = ""
switch self.config.deviceFrom {
case TOPIC_SEL:
if self.config.devicePart >= len(subTopics) {
self.Lost("devicePart out of range", nil, message)
return
}
measurement.Device = subTopics[self.config.devicePart]
case PAYLOAD_SEL:
if self.payloadRegex == nil {
self.Lost("no payloadRegex defined, devicePart can't be used", nil, message)
return
}
if self.config.devicePart >= len(payloadMatches) {
self.Lost("devicePart out of range", nil, message)
return
}
measurement.Device = payloadMatches[self.config.devicePart]
case CONSTANT_SEL:
measurement.Device = self.config.device
}
switch self.config.valueFrom {
case PAYLOAD_SEL:
if self.payloadRegex == nil {
self.Lost("no payloadRegex defined, valuePart can't be used", nil, message)
return
}
if self.config.valuePart >= len(payloadMatches) {
self.Lost("valuePart out of range", nil, message)
return
}
variable.Value = payloadMatches[self.config.valuePart]
case PAYLOAD_FULL_SEL:
variable.Value = message.Payload
}
measurement.Values = make(map[string]database.VariableType)
var variable database.VariableType
variable.Label = ""
variable.Variable = ""
switch self.config.unitFrom {
case PAYLOAD_SEL:
if self.payloadRegex == nil {
self.Lost("no payloadRegex defined, unitPart can't be used", nil, message)
return
}
if self.config.unitPart >= len(payloadMatches) {
self.Lost("unitPart out of range", nil, message)
return
}
variable.Unit = payloadMatches[self.config.unitPart]
case CONSTANT_SEL:
variable.Unit = self.config.unit
}
var value string
switch self.config.valueFrom {
case PAYLOAD_SEL:
if self.payloadRegex == nil {
self.Lost("no payloadRegex defined, valuePart can't be used", nil, message)
return
}
if self.config.valuePart >= len(payloadMatches) {
self.Lost("valuePart out of range", nil, message)
return
}
value = payloadMatches[self.config.valuePart]
case PAYLOAD_FULL_SEL:
value = message.Payload
}
if self.config.valueType == "float" {
fValue, err := strconv.ParseFloat(value, 64)
if err != nil {
self.Lost("Unable to convert value to float", err, message)
return
}
variable.Value = fValue
} else {
variable.Value = value
}
measurement.Values["Value"] = variable
switch self.config.unitFrom {
case PAYLOAD_SEL:
if self.payloadRegex == nil {
self.Lost("no payloadRegex defined, unitPart can't be used", nil, message)
return
}
if self.config.unitPart >= len(payloadMatches) {
self.Lost("unitPart out of range", nil, message)
return
}
variable.Unit = payloadMatches[self.config.unitPart]
case CONSTANT_SEL:
variable.Unit = self.config.unit
}
//log.Printf("Prepared measurement item: %s", measurement)
self.dbh.StoreMeasurement(&measurement)
self.S()
measurement.Values["Value"] = variable
//log.Printf("Prepared measurement item: %s", measurement)
self.dbh.StoreMeasurement(&measurement)
self.S()
}

View File

@@ -1,15 +0,0 @@
package gs361ah04
// Observation is the JSON payload shape published via zigbee2mqtt for a
// GS361A-H04 device — presumably a thermostatic radiator valve, judging by
// the heating-setpoint and valve/window fields (confirm against the device
// docs). The `unit` struct tags carry the physical unit of each field;
// they are read via reflection by the consuming handler.
type Observation struct {
	LinkQuality uint8 `unit:"" json:"linkquality"` // radio link quality, dimensionless
	Battery uint8 `unit:"%" json:"battery"` // battery charge in percent
	AwayMode string `unit:"" json:"away_mode"`
	ChildLock string `unit:"" json:"child_lock"`
	CurrentHeatingSetpoint float32 `unit:"°C" json:"current_heating_setpoint"`
	LocalTemperature float32 `unit:"°C" json:"local_temperature"`
	Preset string `unit:"" json:"preset"`
	SystemMode string `unit:"" json:"system_mode"`
	ValveDetection string `unit:"" json:"valve_detection"`
	WindowDetection string `unit:"" json:"window_detection"`
}

View File

@@ -1,11 +0,0 @@
package wsdcgq01lm
// Observation is the JSON payload shape published via zigbee2mqtt for a
// WSDCGQ01LM sensor. The fields show a temperature/humidity/pressure
// sensor reading; the `unit` struct tags carry each field's physical unit
// and are read via reflection by the consuming handler.
type Observation struct {
	LinkQuality uint8 `unit:"" json:"linkquality"` // radio link quality, dimensionless
	Battery uint8 `unit:"%" json:"battery"` // battery charge in percent
	Humidity float32 `unit:"%H" json:"humidity"` // relative humidity
	Pressure float32 `unit:"mbar" json:"pressure"` // barometric pressure
	Temperature float32 `unit:"°C" json:"temperature"`
	Voltage uint16 `unit:"mV" json:"voltage"` // battery voltage in millivolts
}

View File

@@ -1,10 +0,0 @@
package wsdcgq11lm
// Observation is the JSON payload shape published via zigbee2mqtt for a
// WSDCGQ11LM sensor — same as the WSDCGQ01LM variant but without the
// pressure field. The `unit` struct tags carry each field's physical unit
// and are read via reflection by the consuming handler.
type Observation struct {
	LinkQuality uint8 `unit:"" json:"linkquality"` // radio link quality, dimensionless
	Battery uint8 `unit:"%" json:"battery"` // battery charge in percent
	Humidity float32 `unit:"%H" json:"humidity"` // relative humidity
	Temperature float32 `unit:"°C" json:"temperature"`
	Voltage uint16 `unit:"mV" json:"voltage"` // battery voltage in millivolts
}

View File

@@ -1,112 +1,80 @@
package z2m
import (
"fmt"
"log"
"time"
"strings"
"reflect"
"encoding/json"
"udi/config"
"udi/handlers/handler"
"udi/database"
"udi/handlers/z2m/models/wsdcgq11lm"
"udi/handlers/z2m/models/wsdcgq01lm"
"udi/handlers/z2m/models/gs361ah04"
"encoding/json"
"fmt"
"log"
"strings"
"time"
"udi/config"
"udi/database"
"udi/handlers/handler"
)
// Z2MHandler handles zigbee2mqtt MQTT messages: it embeds the shared
// CommonHandler plumbing and keeps a database handle through which
// parsed observations are stored as measurements.
type Z2MHandler struct {
	handler.CommonHandler
	dbh *database.DatabaseHandle // destination for StoreMeasurement calls
}
func parse(T any, payload string, variables *map[string]database.VariableType) error {
observationType := reflect.TypeOf(T)
observation := reflect.New(observationType).Interface()
err := json.Unmarshal([]byte(payload), observation)
if err != nil {
return fmt.Errorf("Unable to parse payload into Observation struct: %v, %s", err, payload)
}
observationValue := reflect.ValueOf(observation).Elem()
for i := 0; i < observationType.NumField(); i++ {
field := observationType.Field(i)
name := field.Name
unit := field.Tag.Get("unit")
value := observationValue.Field(i).Interface()
(*variables)[name] = database.VariableType {
Label: name,
Variable: "y",
Unit: unit,
Value: value,
}
}
return nil
handler.CommonHandler
dbh *database.DatabaseHandle
}
func New(id string, config config.HandlerConfigT) handler.Handler {
t := &Z2MHandler {
}
t.Id = id
t.dbh = database.NewDatabaseHandle()
return t
t := &Z2MHandler{}
t.Id = id
t.dbh = database.NewDatabaseHandle()
log.Printf("Handler Z2M %d initialized", id)
return t
}
func (self *Z2MHandler) Handle(message handler.MessageT) {
log.Printf("Handler Z2M %d processing %s -> %s", self.Id, message.Topic, message.Payload)
log.Printf("Handler Z2M %d processing %s -> %s", self.Id, message.Topic, message.Payload)
var measurement database.Measurement
measurement.Time = time.Now()
var measurement database.Measurement
measurement.Time = time.Now()
subTopics := strings.Split(message.Topic, "/")
deviceId := subTopics[1]
log.Printf("DeviceId: %s", deviceId)
subTopics := strings.Split(message.Topic, "/")
deviceId := subTopics[1]
log.Printf("DeviceId: %s", deviceId)
device, err1 := self.dbh.GetDeviceByLabel(deviceId)
if err1 != nil {
self.Lost("Error when loading device", err1, message)
return
}
log.Printf("Device: %s", device)
measurement.Device = deviceId
measurement.Application = device.Application.Label
measurement.Device = device.Attributes["Label"].(string)
// Parse JSON direkt in eine map
var jsonData map[string]interface{}
err := json.Unmarshal([]byte(message.Payload), &jsonData)
if err != nil {
self.Lost("Failed to parse JSON payload", err, message)
return
}
var T any
switch device.DeviceType.ModelIdentifier {
case "WSDCGQ11LM":
T = wsdcgq11lm.Observation{}
case "WSDCGQ01LM":
T = wsdcgq01lm.Observation{}
case "GS361A-H04":
T = gs361ah04.Observation{}
default:
self.Lost(fmt.Sprintf("No parser found for %s", device.DeviceType.ModelIdentifier), nil, message)
return
}
measurement.Attributes = make(map[string]interface{})
measurement.Values = make(map[string]database.VariableType)
measurement.Values = make(map[string]database.VariableType)
measurement.Attributes = make(map[string]interface{})
err3 := parse(T,
message.Payload,
&(measurement.Values))
if err3 != nil {
self.Lost("Model parser failed", err3, message)
return
}
// Extract device info for application naming
if deviceData, ok := jsonData["device"]; ok {
if deviceMap, ok := deviceData.(map[string]any); ok {
manufacturerId, hasManufacturer := deviceMap["manufacturerID"]
model, hasModel := deviceMap["model"]
measurement.Attributes["Status"] = "ok"
measurement.Attributes["DeviceId"] = deviceId
measurement.Attributes["DeviceModel"] = device.DeviceType.ModelIdentifier
log.Printf("Prepared measurement item: %s", measurement)
self.dbh.StoreMeasurement(&measurement)
self.S()
if !hasManufacturer || !hasModel {
self.Lost("Missing manufacturerID or model in device data", fmt.Errorf("manufacturerID: %v, model: %v", hasManufacturer, hasModel), handler.MessageT{})
return
}
measurement.Application = "z2m_" + fmt.Sprintf("%v", manufacturerId) + "_" + model.(string)
}
delete(jsonData, "device")
}
// Konvertiere die restlichen Elemente in VariableType-Map
for key, value := range jsonData {
measurement.Values[key] = database.VariableType{
Label: key,
Variable: "",
Unit: "",
Value: value,
}
}
measurement.Attributes["Status"] = "ok"
log.Printf("Prepared measurement item: %s", measurement)
self.dbh.StoreMeasurement(&measurement)
self.S()
}

View File

@@ -1,18 +0,0 @@
package main
import "log"
import "udi/database"
// main is the entry point of the standalone schema-migration tool: it
// configures the standard logger and delegates all actual work to
// database.Migrate.
func main() {
	log.SetPrefix("UDI Migrate Schema: ")
	log.SetFlags(log.Ldate | log.Ltime | log.Lshortfile)
	log.Println("Starting")
	// NOTE(review): connection settings are presumably picked up inside
	// the database package (env/config) — confirm there; nothing is
	// passed in from here.
	database.Migrate()
	log.Println("Done")
}