Compare commits

...

79 Commits

Author SHA1 Message Date
c1a8a0b8f2 add decoder for lsn50 3-way, fix 1
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/tag/woodpecker Pipeline was successful
2024-02-09 13:40:21 +01:00
8dbef7c647 add decoder for lsn50 3-way
Some checks failed
ci/woodpecker/push/woodpecker Pipeline failed
2024-02-09 13:38:55 +01:00
42b307ff7b fix image name in deploy script
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/tag/woodpecker Pipeline was successful
2024-02-07 22:46:35 +01:00
943516f1ac upgrade some modules due to vulnerabilities
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/tag/woodpecker Pipeline was successful
2024-02-07 22:41:01 +01:00
1a8e76dc32 add trivy in pipeline
Some checks failed
ci/woodpecker/push/woodpecker Pipeline failed
2024-02-07 22:35:15 +01:00
3e4c621645 test
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2024-01-30 16:54:50 +01:00
c8e60df30b test 2024-01-30 16:33:09 +01:00
664a2831ab test
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2024-01-30 16:28:49 +01:00
00524c0a3f label in snmp measurements
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/tag/woodpecker Pipeline was successful
2024-01-26 14:44:23 +01:00
3af9482880 fix
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/tag/woodpecker Pipeline was successful
2024-01-25 15:27:33 +01:00
df353d4f6c skip diff value
Some checks failed
ci/woodpecker/push/woodpecker Pipeline failed
ci/woodpecker/tag/woodpecker Pipeline failed
2024-01-25 15:24:11 +01:00
d1bbbeaccf snmp
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/tag/woodpecker Pipeline was successful
2024-01-25 15:13:36 +01:00
8cfc92c226 some more views
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2024-01-15 11:12:23 +01:00
08e81e309c locative handler, config adjusted
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/tag/woodpecker Pipeline was successful
2024-01-15 10:16:58 +01:00
f44664eaad locative handler
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/tag/woodpecker Pipeline was successful
2024-01-15 10:10:51 +01:00
15458b9955 hottisScd30
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/tag/woodpecker Pipeline was successful
2024-01-07 18:48:47 +01:00
f55990cc57 hottisScd30
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/tag/woodpecker Pipeline was successful
2024-01-07 18:23:22 +01:00
766355f85d hottisScd30
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/tag/woodpecker Pipeline was successful
2024-01-07 18:14:59 +01:00
73aaa2225d hottisScd30
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/tag/woodpecker Pipeline was successful
2024-01-07 17:28:32 +01:00
44b7461d19 fix migrate pv
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2024-01-04 22:22:19 +01:00
c54b335e5f queries and migrations
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2024-01-03 20:57:26 +01:00
956d1bdcdb fix status in migration
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2023-12-28 15:56:30 +01:00
b938d48c7f add status in ttn handlers
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/tag/woodpecker Pipeline was successful
2023-12-28 15:48:51 +01:00
b374b7f49d some queries
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2023-12-28 13:04:25 +01:00
879825a260 format paylaod
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/tag/woodpecker Pipeline was successful
2023-12-27 14:21:21 +01:00
b6132afb11 disable logging in ttn
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/tag/woodpecker Pipeline was successful
2023-12-27 12:14:05 +01:00
5e94782575 add RawPayloadPrinter
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/tag/woodpecker Pipeline was successful
2023-12-27 12:06:50 +01:00
57c63adeb2 handover FrmPayload to model parsers too 2023-12-27 12:00:02 +01:00
e209598f9e secrets
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/tag/woodpecker Pipeline was successful
2023-12-24 15:57:53 +01:00
03f8f9fade fix in deploy script
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/tag/woodpecker Pipeline was successful
2023-12-24 14:41:49 +01:00
fc91a0da2e change db password approach
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/tag/woodpecker Pipeline was successful
2023-12-24 14:28:46 +01:00
7d8d8b1c6a soil hottis
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/tag/woodpecker Pipeline was successful
2023-12-24 11:05:37 +01:00
ffbda52c36 migration stuff
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2023-12-23 22:57:27 +01:00
647a2d36e5 saerbeck
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/tag/woodpecker Pipeline was successful
2023-12-23 21:57:43 +01:00
a8db62ea52 LSE01
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2023-12-22 17:28:32 +01:00
8e6bea3f19 dt1t and counter and refactoring using embedded interfaces
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/tag/woodpecker Pipeline was successful
2023-12-21 13:05:00 +01:00
99d678b4b1 dt1t
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2023-12-20 17:15:44 +01:00
3779547a95 dt1t
Some checks failed
ci/woodpecker/push/woodpecker Pipeline failed
2023-12-20 14:14:55 +01:00
caffafdfbc fix secret name
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/tag/woodpecker Pipeline was successful
2023-12-19 14:05:54 +01:00
f5d271bba9 add udi-berresheim
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/tag/woodpecker Pipeline was successful
2023-12-19 14:01:59 +01:00
a69b33ac32 fix ci, 6, remove debug
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/tag/woodpecker Pipeline was successful
2023-12-19 13:03:32 +01:00
9041034723 fix ci, 5
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/tag/woodpecker Pipeline was successful
2023-12-19 12:11:41 +01:00
dae37100f5 fix ci, 4
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/tag/woodpecker Pipeline was successful
2023-12-19 12:05:07 +01:00
f6728eb898 fix ci, 3
Some checks failed
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/tag/woodpecker Pipeline failed
2023-12-19 11:56:45 +01:00
e18aeed273 fix ci, 2
Some checks failed
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/tag/woodpecker Pipeline failed
2023-12-19 11:53:08 +01:00
4eab542960 fix ci
Some checks failed
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/tag/woodpecker Pipeline failed
2023-12-19 11:50:22 +01:00
c77394bf4d secrets handling, part 2 2023-12-19 11:47:37 +01:00
7eb7ec4798 secrets handling 2023-12-19 11:43:29 +01:00
bcc74dda29 ci fixed
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/tag/woodpecker Pipeline was successful
2023-12-18 22:05:31 +01:00
291fec96d1 ci fixed
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/tag/woodpecker Pipeline was successful
2023-12-18 21:41:21 +01:00
52f1b4680d ci fixed
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/tag/woodpecker Pipeline was successful
2023-12-18 21:35:36 +01:00
6398c1978f deploy script
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2023-12-18 18:16:01 +01:00
9d435159ce configMap
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2023-12-17 22:47:26 +01:00
96377e9572 start deployment stuff
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2023-12-17 22:36:13 +01:00
ee2c5f31e8 database configuration only via PG* env variables and MQTT password only via configured env var
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2023-12-17 13:53:16 +01:00
6d932f56c8 groundlevel stuff
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2023-12-15 17:29:12 +01:00
7ee7d4df89 Dragino parser
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2023-12-15 11:44:35 +01:00
084f9fbf31 queries
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2023-12-14 15:56:04 +01:00
b9203c4cb3 fix deploy
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/tag/woodpecker Pipeline was successful
2023-12-14 11:21:34 +01:00
7205f9794a fix deploy
Some checks failed
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/tag/woodpecker Pipeline failed
2023-12-14 11:17:38 +01:00
985d05b0a0 fix deploy
Some checks failed
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/tag/woodpecker Pipeline failed
2023-12-14 11:14:50 +01:00
d8677d685b config
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/tag/woodpecker Pipeline was successful
2023-12-13 11:40:17 +01:00
2dd830907d emu stuff
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2023-12-12 16:39:22 +01:00
e99c9023a0 changes, still not working
Some checks failed
ci/woodpecker/push/woodpecker Pipeline failed
2023-12-11 22:35:36 +01:00
b3de6182b3 emu
Some checks failed
ci/woodpecker/push/woodpecker Pipeline failed
2023-12-11 16:50:28 +01:00
668fc20be9 that's better
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2023-12-08 17:17:05 +01:00
ea9db110a5 model parser
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2023-12-08 17:13:56 +01:00
4950b67afd basic ttn parsing
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2023-12-08 15:41:11 +01:00
77ac44742b ttn
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2023-12-07 19:52:00 +01:00
ad34f9b27b confgi
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2023-12-07 15:14:22 +01:00
65909becd6 Merge branch 'regex-jsonpath-separation'
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/tag/woodpecker Pipeline was successful
2023-12-06 14:33:18 +01:00
d7c30ef0eb svej
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2023-12-06 14:32:50 +01:00
8085f8937e rename sve to sver, fixes in sver 2023-12-06 12:30:53 +01:00
22b1203ea8 not working jsonpath stuff
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2023-12-06 12:21:36 +01:00
8cf4562056 jsonpath
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/tag/woodpecker Pipeline was successful
2023-12-05 18:10:09 +01:00
695f78a632 remove debug
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2023-12-05 17:13:27 +01:00
ff659b648c reloader
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/tag/woodpecker Pipeline was successful
2023-12-05 17:09:22 +01:00
a5209dad8f fix configuration
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2023-12-05 16:21:30 +01:00
00a9eceea8 additional queries
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/tag/woodpecker Pipeline was successful
2023-12-05 13:54:37 +01:00
61 changed files with 2967 additions and 188 deletions

4
.gitignore vendored
View File

@ -1,5 +1,7 @@
config-*.json
src/udi/udi
src/udi/migrate_schema
tmp/
ENVDB
ENVDB.cluster
deployment/secrets.txt
deployment/secrets

View File

@ -2,8 +2,7 @@ steps:
build:
image: plugins/kaniko
settings:
repo:
from_secret: image_name
repo: ${FORGE_NAME}/${CI_REPO}
registry:
from_secret: container_registry
tags: latest,${CI_COMMIT_SHA},${CI_COMMIT_TAG}
@ -15,16 +14,26 @@ steps:
when:
- event: [push, tag]
scan_image:
image: aquasec/trivy
commands:
- trivy image $FORGE_NAME/$CI_REPO:$CI_COMMIT_SHA --quiet --exit-code 1
when:
- event: [push, tag]
deploy:
image: portainer/kubectl-shell:latest
secrets:
- source: kube_config
target: KUBE_CONFIG_CONTENT
- source: image_name
target: IMAGE_NAME
- source: encryption_key
target: ENCRYPTION_KEY
- source: secrets_checksum
target: MD5_CHECKSUM
commands:
- export IMAGE_TAG=$CI_COMMIT_TAG
- printf "$KUBE_CONFIG_CONTENT" > /tmp/kubeconfig
- export KUBECONFIG=/tmp/kubeconfig
- cat $CI_WORKSPACE/deployment/deploy-yml.tmpl | sed -e 's,%IMAGE%,'$IMAGE_NAME':'$CI_COMMIT_TAG',' | kubectl apply -f -
- ./deployment/deploy.sh
when:
- event: tag

View File

@ -13,4 +13,3 @@ ENV UDI_CONF ""
COPY --from=builder /go/src/udi ./
ENTRYPOINT ["./udi"]

43
deployment/decrypt-secrets.sh Executable file
View File

@ -0,0 +1,43 @@
#!/bin/bash
# Decrypt deployment/secrets.enc into /tmp/secrets using a throwaway
# openssl pod started in the cluster, then verify the plaintext against
# an expected MD5 checksum before exposing it.
#
# Required environment:
#   ENCRYPTION_KEY - symmetric key for openssl enc -aes-256-cbc
#   MD5_CHECKSUM   - expected md5 of the decrypted plaintext
if [ "$ENCRYPTION_KEY" = "" ]; then
  echo "ENCRYPTION_KEY not set"
  exit 1
fi
if [ "$MD5_CHECKSUM" = "" ]; then
  echo "No checksum given"
  exit 1
fi
SECRETS_CIPHERTEXT_FILE=secrets.enc
SECRETS_PLAINTEXT_FILE=/tmp/secrets
TMP_FILE=$(mktemp)
# Unique pod name so concurrent runs do not collide.
POD_NAME_SUFFIX=$(date +%s)
cat "$SECRETS_CIPHERTEXT_FILE" | \
  kubectl run openssl-$POD_NAME_SUFFIX \
    --rm \
    --image bitnami/debian-base-buildpack:latest \
    --env KEY=$ENCRYPTION_KEY \
    -i \
    -q \
    -- \
    /bin/sh -c "openssl enc -aes-256-cbc -salt -pass env:KEY -a -d" > \
  "$TMP_FILE"
# md5 tooling differs between macOS and Linux.
if [ "$(uname)" = "Darwin" ]; then
  CALCULATED_CHECKSUM=$(cat "$TMP_FILE" | md5)
elif [ "$(uname)" = "Linux" ]; then
  CALCULATED_CHECKSUM=$(cat "$TMP_FILE" | md5sum - | awk '{print $1}')
else
  # BUG FIX: previously fell through with an empty checksum and a
  # misleading "Invalid checksum" error on other platforms.
  echo "Unsupported platform: $(uname)"
  rm -f "$TMP_FILE"
  exit 1
fi
if [ "$MD5_CHECKSUM" != "$CALCULATED_CHECKSUM" ]; then
  echo "Invalid checksum"
  # BUG FIX: remove the decrypted plaintext on failure; leaving it in
  # the temp file would leak the secrets to disk.
  rm -f "$TMP_FILE"
  exit 1
fi
mv "$TMP_FILE" "$SECRETS_PLAINTEXT_FILE"

View File

@ -1,8 +1,7 @@
apiVersion: v1
kind: PersistentVolumeClaim
metadata:
name: udi-archive
namespace: udi
name: %PRE%-udi-archive
spec:
accessModes:
- ReadWriteOnce
@ -14,10 +13,11 @@ spec:
apiVersion: apps/v1
kind: Deployment
metadata:
name: udi
namespace: udi
name: %PRE%-udi
labels:
app: udi
annotations:
secret.reloader.stakater.com/reload: "%PRE%-udi-conf,%PRE%-udi-db-cred,%PRE%-mqtt-password"
spec:
replicas: 1
selector:
@ -31,17 +31,18 @@ spec:
containers:
- name: udi
image: %IMAGE%
env:
- name: UDI_CONF
valueFrom:
secretKeyRef:
name: udi-conf
key: UDI_CONF
envFrom:
- secretRef:
name: %PRE%-udi-db-cred
- secretRef:
name: %PRE%-mqtt-password
- configMapRef:
name: %PRE%-udi-conf
volumeMounts:
- mountPath: /archive
name: udi-archive
volumes:
- name: udi-archive
persistentVolumeClaim:
claimName: udi-archive
claimName: %PRE%-udi-archive

92
deployment/deploy.sh Executable file
View File

@ -0,0 +1,92 @@
#!/bin/bash
# Deploy every udi instance found under deployment/instances/.
# Expected layout: deployment/instances/<namespace>/<instance>/config.json
#
# Required environment:
#   IMAGE_TAG            - image tag to deploy (set from the CI tag)
#   FORGE_NAME, CI_REPO  - combined into the image name
# Secret material (MQTT passwords, DB credentials) comes from the
# encrypted secrets file decrypted by decrypt-secrets.sh.
if [ "$IMAGE_TAG" == "" ]; then
  echo "Make sure IMAGE_TAG is set"
  exit 1
fi
IMAGE_NAME=$FORGE_NAME/$CI_REPO
CONFIG_FILE=config.json
DEPLOYMENT_DIR=$PWD/deployment
INSTANCES_DIR=$DEPLOYMENT_DIR/instances
pushd $DEPLOYMENT_DIR > /dev/null
./decrypt-secrets.sh || exit 1
# Source the decrypted secrets and remove the plaintext right away.
. /tmp/secrets
rm /tmp/secrets
popd > /dev/null
# NOTE: -mindepth/-maxdepth are positional options and must precede
# tests like -type to avoid GNU find warnings.
for NAMESPACE_DIR in `find $INSTANCES_DIR -mindepth 1 -maxdepth 1 -type d`; do
  NAMESPACE=`basename $NAMESPACE_DIR`
  echo "Namespace: $NAMESPACE"
  # Idempotent namespace creation.
  # BUG FIX: flags must follow the subcommand -- "kubectl apply -f -",
  # not "kubectl -f - apply" (the latter fails with an unknown-flag error).
  kubectl create namespace $NAMESPACE \
    --dry-run=client \
    -o yaml | \
    kubectl apply -f -
  pushd $NAMESPACE_DIR > /dev/null
  for INSTANCE_DIR in `find . -mindepth 1 -maxdepth 1 -type d`; do
    pushd $INSTANCE_DIR > /dev/null
    INSTANCE=`basename $INSTANCE_DIR`
    echo "Instance: $INSTANCE"
    # Secret variables in /tmp/secrets are named <namespace>_<instance>_<KEY>
    # with dashes mapped to underscores; resolve them via indirect expansion.
    VARIABLE_PREFIX=`echo "$NAMESPACE""_""$INSTANCE" | tr - _`
    # MQTT password becomes a per-instance secret.
    MQTT_PASSWORD_VARIABLE=$VARIABLE_PREFIX"_MQTT_PASSWORD"
    MQTT_PASSWORD="${!MQTT_PASSWORD_VARIABLE}"
    kubectl create secret generic $INSTANCE-mqtt-password \
      --from-literal=MQTT_PASSWORD="$MQTT_PASSWORD" \
      --dry-run=client \
      -o yaml \
      --save-config | \
      kubectl apply -f - -n $NAMESPACE
    # Database credentials (PG* convention) become a per-instance secret.
    LOGIN_VARIABLE=$VARIABLE_PREFIX"_PGUSER"
    NEW_UDI_DB_LOGIN="${!LOGIN_VARIABLE}"
    PASSWORD_VARIABLE=$VARIABLE_PREFIX"_PGPASSWORD"
    NEW_UDI_DB_PASSWORD="${!PASSWORD_VARIABLE}"
    DATABASE_VARIABLE=$VARIABLE_PREFIX"_PGDATABASE"
    NEW_UDI_DB_DATABASE="${!DATABASE_VARIABLE}"
    NEW_UDI_DB_HOST=timescaledb.database.svc.cluster.local
    kubectl create secret generic $INSTANCE-udi-db-cred \
      --dry-run=client \
      -o yaml \
      --save-config \
      --from-literal=PGUSER="$NEW_UDI_DB_LOGIN" \
      --from-literal=PGPASSWORD="$NEW_UDI_DB_PASSWORD" \
      --from-literal=PGDATABASE="$NEW_UDI_DB_DATABASE" \
      --from-literal=PGHOST="$NEW_UDI_DB_HOST" \
      --from-literal=PGSSLMODE="require" | \
      kubectl apply -f - -n $NAMESPACE
    # Non-secret configuration becomes a configMap.
    kubectl create configmap $INSTANCE-udi-conf \
      --from-literal=UDI_CONF="`cat $CONFIG_FILE`" \
      --dry-run=client \
      -o yaml \
      --save-config | \
      kubectl apply -f - -n $NAMESPACE
    # Render the deployment template (%IMAGE%, %PRE%) and apply it.
    cat $DEPLOYMENT_DIR/deploy-yml.tmpl | \
      sed -e 's,%IMAGE%,'$IMAGE_NAME':'$IMAGE_TAG','g | \
      sed -e 's,%PRE%,'$INSTANCE','g | \
      kubectl apply -f - -n $NAMESPACE
    popd > /dev/null
  done
  popd > /dev/null
done

27
deployment/encrypt-secrets.sh Executable file
View File

@ -0,0 +1,27 @@
#!/bin/bash
# Encrypt the plaintext secrets file with a freshly generated key, using a
# throwaway openssl pod in the cluster. Prints the generated key and the
# md5 checksum of the plaintext -- both are needed later by
# decrypt-secrets.sh (ENCRYPTION_KEY / MD5_CHECKSUM).
SECRETS_PLAINTEXT_FILE=secrets.txt
SECRETS_CIPHERTEXT_FILE=secrets.enc
ENCRYPTION_KEY=$(openssl rand -hex 32)
echo $ENCRYPTION_KEY
# md5 tooling differs between macOS and Linux.
case $(uname) in
  Darwin)
    cat $SECRETS_PLAINTEXT_FILE | md5
    ;;
  Linux)
    cat $SECRETS_PLAINTEXT_FILE | md5sum - | awk '{print $1}'
    ;;
esac
# Unique pod name so concurrent runs do not collide.
POD_NAME_SUFFIX=$(date +%s)
cat $SECRETS_PLAINTEXT_FILE | \
  kubectl run openssl-$POD_NAME_SUFFIX \
    --rm \
    --image bitnami/debian-base-buildpack:latest \
    --env KEY=$ENCRYPTION_KEY \
    -i \
    -q \
    -- \
    /bin/sh -c "openssl enc -aes-256-cbc -salt -pass env:KEY -a" > \
  $SECRETS_CIPHERTEXT_FILE

View File

@ -0,0 +1,23 @@
{
"mqtt": {
"broker": "ssl://eu1.cloud.thethings.network:8883",
"username": "de-hottis-level-monitoring@ttn",
"password": "ENV",
"tlsEnable": "true"
},
"topicMappings": [
{
"topics": [ "v3/#" ],
"handler": "TTN",
"id": "TTN0",
"config": {
"databaseConnStr": "",
"attributes": {
}
}
}
],
"archiver": {
"dir": "/archive"
}
}

View File

@ -0,0 +1,21 @@
{
"mqtt": {
"broker": "ssl://eu1.cloud.thethings.network:8883",
"username": "com-passavant-geiger-poc@ttn",
"tlsEnable": "true"
},
"topicMappings": [
{
"topics": [ "v3/com-passavant-geiger-poc@ttn/devices/#" ],
"handler": "TTN",
"id": "TTN0",
"config": {
"attributes": {
}
}
}
],
"archiver": {
"dir": "/archive"
}
}

View File

@ -0,0 +1,22 @@
{
"mqtt": {
"broker": "ssl://eu1.cloud.thethings.network:8883",
"username": "de-hottis-saerbeck-monitoring@ttn",
"password": "ENV",
"tlsEnable": "true"
},
"topicMappings": [
{
"topics": [ "v3/#" ],
"handler": "TTN",
"id": "TTN0",
"config": {
"attributes": {
}
}
}
],
"archiver": {
"dir": "/archive"
}
}

View File

@ -0,0 +1,22 @@
{
"mqtt": {
"broker": "ssl://eu1.cloud.thethings.network:8883",
"username": "de-hottis-level-monitoring@ttn",
"password": "ENV",
"tlsEnable": "true"
},
"topicMappings": [
{
"topics": [ "v3/#" ],
"handler": "TTN",
"id": "TTN0",
"config": {
"attributes": {
}
}
}
],
"archiver": {
"dir": "/archive"
}
}

View File

@ -0,0 +1,22 @@
{
"mqtt": {
"broker": "ssl://eu1.cloud.thethings.network:8883",
"username": "de-hottis-lora-test1@ttn",
"password": "ENV",
"tlsEnable": "true"
},
"topicMappings": [
{
"topics": [ "v3/#" ],
"handler": "TTN",
"id": "TTN0",
"config": {
"attributes": {
}
}
}
],
"archiver": {
"dir": "/archive"
}
}

View File

@ -0,0 +1,154 @@
{
"mqtt": {
"broker": "mqtt://emqx01-anonymous-cluster-internal.broker.svc.cluster.local:1883",
"tlsEnable": "false"
},
"topicMappings": [
{
"topics": [ "snmp" ],
"handler": "SNMP",
"id": "SNMP",
"config": {
"attributes": {
}
}
},
{
"topics": [ "dt1/ai/periodic/1" ],
"handler": "DT1T",
"id": "DT1T.0",
"config": {
"attributes": {
"Application": "Temperature Wago",
"Device": "Freezer",
"HardLow": "-273",
"SoftLow": "-50",
"SoftHigh": "20",
"HardHigh": "100"
}
}
},
{
"topics": [ "dt1/ai/periodic/3" ],
"handler": "DT1T",
"id": "DT1T.1",
"config": {
"attributes": {
"Application": "Temperature Wago",
"Device": "Outdoor",
"HardLow": "-273",
"SoftLow": "-60",
"SoftHigh": "60",
"HardHigh": "100"
}
}
},
{
"topics": [ "IoT/PV/Values" ],
"handler": "PV",
"id": "PV",
"config": {
"databaseConnStr": "",
"attributes": {
}
}
},
{
"topics": [ "locative/event/#" ],
"handler": "Locative",
"id": "Locative",
"config": {
"databaseConnStr": "",
"attributes": {
}
}
},
{
"topics": [ "IoT/MBGW3/Measurement" ],
"handler": "MBGW3",
"id": "MBGW3",
"config": {
"databaseConnStr": "",
"attributes": {
}
}
},
{
"topics": [ "IoT/OneWireGW/Bus 1/#" ],
"handler": "SVER",
"id": "SVER0",
"config": {
"databaseConnStr": "",
"attributes": {
"application": "Temperature Heating",
"payloadRegex": "(\\d+(\\.\\d+)?)\\s*([^0-9\\s]\\S*)",
"deviceFrom": "topic",
"devicePart": "3",
"valueFrom": "payload",
"valuePart": "1",
"unitFrom": "payload",
"unitPart": "3"
}
}
},
{
"topics": [ "NR/Multisensor/+/Temperatur" ],
"handler": "SVEJ",
"id": "SVEJ0",
"config": {
"databaseConnStr": "",
"attributes": {
"application": "Temperature Multisensor",
"deviceSelector": "T:2",
"valueSelector": "J:$.CurrentTemperature",
"unitSelector": "C:°C"
}
}
},
{
"topics": [ "NR/Multisensor/+/Feuchte" ],
"handler": "SVEJ",
"id": "SVEJ1",
"config": {
"databaseConnStr": "",
"attributes": {
"application": "Humidity Multisensor",
"deviceSelector": "T:2",
"valueSelector": "J:$.CurrentRelativeHumidity",
"unitSelector": "C:%"
}
}
},
{
"topics": [ "shellyplusht/+/status/temperature:0" ],
"handler": "SVEJ",
"id": "SVEJ2",
"config": {
"databaseConnStr": "",
"attributes": {
"application": "Temperature Shelly Plus HT",
"deviceSelector": "T:1",
"valueSelector": "J:$.tC",
"unitSelector": "C:°C"
}
}
},
{
"topics": [ "shellyplusht/+/status/humidity:0" ],
"handler": "SVEJ",
"id": "SVE4",
"config": {
"databaseConnStr": "",
"attributes": {
"application": "Humidity Shelly Plus HT",
"deviceSelector": "T:1",
"valueSelector": "J:$.rh",
"unitSelector": "C:%"
}
}
}
],
"archiver": {
"dir": "/archive"
}
}

View File

@ -0,0 +1,22 @@
{
"mqtt": {
"broker": "ssl://eu1.cloud.thethings.network:8883",
"username": "de-hottis-app01@ttn",
"password": "ENV",
"tlsEnable": "true"
},
"topicMappings": [
{
"topics": [ "v3/#" ],
"handler": "TTN",
"id": "TTN0",
"config": {
"attributes": {
}
}
}
],
"archiver": {
"dir": "/archive"
}
}

View File

@ -1,25 +0,0 @@
#!/bin/bash
FILE=$1
if [ "$FILE" = "" ]; then
echo "give config file to load as first argument"
exit 1
fi
SECRET_NAME=$2
if [ "$SECRET_NAME" = "" ]; then
echo "give secret name to create/modify as second argument"
exit 1
fi
NAMESPACE=$3
if [ "$NAMESPACE" = "" ]; then
echo "give namespace as third argument"
exit 1
fi
kubectl create secret generic $SECRET_NAME \
--from-literal=UDI_CONF="`cat $FILE`" \
-n $NAMESPACE \
--dry-run=client \
-o yaml \
--save-config | \
kubectl apply -f -

38
deployment/secrets.enc Normal file
View File

@ -0,0 +1,38 @@
U2FsdGVkX1+v6L4gc+CbYCZyo/UVN7QfmEntIBpk+GAHGf3d7m/4hfcYd39Eh2td
lXSmNdt1cdFw/UfZ1x1OlGm/fqLh/j/rWPgEc6BwEcDFDEXpTucTjUHNDonYNH8j
eDWeAGokfguqgQG16CBLHdeyocP0kTPJSrIKQgG1Mzzck/kfB1Z6Ggv4z5KEx2dy
2rrnm+BeFT1yITwoxa3iJeudcSQznNIqQa+Mx4fUsPV+yorahp4gs0PVVj9POnAT
yRhpQgkaq5oZNVcYrWS5+6mmhbzL5jIAa4wfzVep/69RcfBkV5Oj5JJGaQzH0T74
wg8dWz/scdi2kkCn0KroJPrsG/lAsFYhbX4kUJQeRUX1pWr/iwD0i8LRx+f2C82Y
HgpsnG6c5nPRy68TltgRgCRAIJj87rR/fATVowcpChfe9sXCwfLEZ5Q2hDK8eAPW
VS87axMkProyHJZe1GK0v9CAVWpXlxv6eAr8u2SftGA87Xu3ebQ4SjReXIcAb7M6
08UnxW4YcfH+usgU2GUuNlzRctAq334AfBWYQO51l/ELJAzaDi6Ht4Czr6R7Bsfh
M3ZcjcgqY7j7ywDFmKq/a8Q0Dsjm2sezNtrrRWusomgSKFEf8WncOdkcWOAiza4T
+Qubfr1SuZuWFF+migGtYM3X8YS+VpmMRIpJ1otibMELgjvldWGqHIK1uIThLq7F
MvQ0Nog6UNg79/8vrUoEUPPB5fQsXcNC5zcpVMrpJcGogBHhsXk1EPFcB75sx/65
bl2BZlCBacH9MNIBPh17dMC46EV1FNaLiO1N3/qJkxrkiG5wBDjDlnyMn/mYc/o1
olNuIO0nnn2x8ZU02lRo8RqcqOywseZeBhAzOj+899n5Qa/0YQAnb0Y9WAxqLft/
0C45HcK5Kgd3C6wqvVUqcQ/UMxQzv0y1cM8gbfpGjUvJ6gUj7vkW08D55A6gV8Lf
SrneWAP/1B1mmV16vHaXwoYpTpQwM7i7fHWBOpH7nq6E+0P3LHyon43dYo4P6KM7
He3R6phTFp36WI4ZCUQafTDZS196Ol2ZyEAonVwSOIEIyptXeoAmleolXC/eL84Z
bEbhld8g+ulrVSrBXFpCY3jBsqPVBYEpZaGYgevsrHPSbwWa/qQkTKnOO4+oz6Pe
9iJ1yJbSWfg6Gkr6iqE41Dp4VGXtwTDHHb9YMd56iWHAkxZLFIWdYUr8XfQS6j70
j5kV3jV/w5EHGYruBdtxAWc7YKq3pfqvh9R7dD/8JOFZhA140+zmOCWG4qdDhv+5
F9vlawudssa9ZHGi1jBFPCNW13LBhUdyCY3apKF4HHeeuA465uzxIqwtkJSigdun
vC9ooYZrJjYOnJSTJnKH0WSD0pPC6CIkge+Fxuksq6cst5Zcysw1xz5zs7UNeAP+
kLs1+8Kn2d1hJuzSWdWlj7xGratLEdA6pqcfBKvMYtY0kpPPDrxm+F1FZ7LyV+dc
G1vfI6aS2azrFrBNXSeOArJ/erGHIGhWxFY0c3bcGOjXwsLWRjQ03Kdj9ffj6UFL
4JJaI0I01RilAo+woaZhNmOHl1VxSsU1lDGF7IvW3t0qKLaSg/Rv3pQqdKyjq8I5
IxPlUEMdo1EDZZx4qLmYBM1tWhgMbn4nx6P2BS7obnPdaf3B0RPxI68Z49RYZKvR
/wTyr7oWCCRQDwCuVH8t/jUrSWspzEK7ApXHdh7T9JlNurFW7oxc8ylooQrAn3Gn
mru7X3cUeVtiosAklZ7w+JNxm44IRmDKNVDeAaat+q35EA8MRFGiuXEOeNw54tWH
zNkUyUJ79Ie7BkGrZFUFqkvfY3Q/xLaBGYDQe65S8/rerybL0YI7RmMiz4x7yq8L
GoIDwPsn0z/AFefoGTi0tAXZeC+EA62okK1kKR9qrh9gmD59uiMbFX1BHe3rWhgP
cCPScYeameXV3K6wwQpX8JTdptqMAH5cpEVoUZ/PZZpkaiCuWcMODVbqTpm4SRPt
Q9s5+6/g0TUUqz7Fwi0dlfnMZVuK0a1Uf/SBYR7f/UYVLfF5juTZ+IRJwQWwp6QX
CzfYms0W34/srtM72mQOpKTd0o3xuFyVbQtZPOpNghIjArQqwt34nEzXPYHqasDx
c/yIPdW+B/YVcFPdRV16Izqmjdlupv6pPjY/T6GdHczQsH9gD28HN9+Ka2Cvficf
evO7IXe0RuvodQ3tB4LmeWoJB10G7Sko2EEfpFTDXke9Ak/5cGrpdPMtbXCAIm1o
B5UhrqNuUYSWdo0mGttbSjFR7pyLujsxLNnp8teBi33QOUhrSId5+mOvtFDGiZKa
QCC+W+BIh6IFIwnxH4dDxjz3M65NXzqNV+6mXEFU77cX+oTF4BRe0R/L4nPoaBAN
smRxtqBItpVFUdsOVb6bXg==

1
migration/.gitignore vendored Normal file
View File

@ -0,0 +1 @@
.venv

View File

@ -0,0 +1,38 @@
import psycopg2
from loguru import logger
import json

# Migrate rows from the legacy level-monitoring database into the generic
# udi measurements table.
#
# Source:  measurement_t (time, application_name, raw_level, level, status, battery)
# Target:  measurements  (time, application, device, attributes, values)

# BUG FIX: initialize to None so the finally block does not raise
# NameError when the first connect() fails.
srcConn = None
destConn = None
try:
    srcConn = psycopg2.connect(database="level_monitoring_berresheim")
    srcConn.autocommit = False
    destConn = psycopg2.connect(database="udi-berresheim")
    destConn.autocommit = False
    with srcConn.cursor() as srcCur, destConn.cursor() as destCur:
        srcCur.execute("select time, application_name, raw_level, level, status, battery from measurement_t")
        for timestamp, deviceName, rawLevel, level, status, battery in srcCur:
            logger.info(f"{timestamp=}, {deviceName=}, {rawLevel=}, {level=}, {status=}, {battery=}")
            destTime = timestamp
            destApplication = "de-hottis-level-monitoring"
            destDevice = "eui-a84041a2c18341d6"
            # BUG FIX: build JSON with json.dumps instead of string
            # concatenation, so quotes/backslashes in status cannot
            # produce malformed documents.
            destAttributes = json.dumps({
                "ApplicationId": "de-hottis-level-monitoring",
                "DeviceType": "dragino-ldds75",
                "Status": status,
                "Hint": "Migrated",
            })
            # float() because psycopg2 may return Decimal for numeric
            # columns, which json.dumps rejects -- TODO confirm column types.
            destValues = json.dumps({
                "Battery": {"unit": "V", "label": "Battery",
                            "value": float(battery), "variable": "Voltage"},
                "Distance": {"unit": "mm", "label": "Distance",
                             "variable": "Level", "value": float(rawLevel)},
                "CorrectedDistance": {"unit": "mm", "label": "CorrectedDistance",
                                      "variable": "Level", "value": float(level)},
            })
            logger.info(f"{destTime=}, {destApplication=}, {destDevice=}, {destAttributes=}, {destValues=}")
            destCur.execute("insert into measurements (time, application, device, attributes, values) values(%s, %s, %s, %s, %s)",
                            (destTime, destApplication, destDevice, destAttributes, destValues))
            # Commit per row, as before, so a late failure keeps earlier rows.
            destConn.commit()
finally:
    if srcConn:
        srcConn.close()
    if destConn:
        destConn.close()

79
migration/migrate-pv.py Normal file
View File

@ -0,0 +1,79 @@
import psycopg2
from loguru import logger
import os
import json

# Migrate PV power measurements from the legacy pv_power_measurement_t table
# into the generic udi measurements table. Source and destination databases
# are configured via SRC_PG* / DEST_PG* environment variables.

srcPgHost = os.environ["SRC_PGHOST"]
srcPgUser = os.environ["SRC_PGUSER"]
srcPgPassword = os.environ["SRC_PGPASSWORD"]
srcPgDatabase = os.environ["SRC_PGDATABASE"]
destPgHost = os.environ["DEST_PGHOST"]
destPgUser = os.environ["DEST_PGUSER"]
destPgPassword = os.environ["DEST_PGPASSWORD"]
destPgDatabase = os.environ["DEST_PGDATABASE"]

# BUG FIX: initialize to None so the finally block does not raise
# NameError when the first connect() fails.
srcConn = None
destConn = None
try:
    srcConn = psycopg2.connect(
        host=srcPgHost,
        dbname=srcPgDatabase,
        user=srcPgUser,
        password=srcPgPassword,
        sslmode='require'
    )
    srcConn.autocommit = False
    destConn = psycopg2.connect(
        host=destPgHost,
        dbname=destPgDatabase,
        user=destPgUser,
        password=destPgPassword,
        sslmode='require'
    )
    destConn.autocommit = False
    with srcConn.cursor() as srcCur, destConn.cursor() as destCur:
        srcCur.execute("select time, deviceid, status, state, importenergyactive, importenergyreactive, exportenergyactive, exportenergyreactive, powerapparent, poweractive, powerreactive, powerdemandpositive, powerdemandreverse, factor, angle, voltage, current, powerdemand from pv_power_measurement_t order by time")
        for srcObj in srcCur:
            # Unpack in result-column order. powerdemand is fetched but was
            # never part of the destination document in the original either.
            (timestamp, deviceName, status, state,
             importenergyactive, importenergyreactive,
             exportenergyactive, exportenergyreactive,
             powerapparent, poweractive, powerreactive,
             powerdemandpositive, powerdemandreverse,
             factor, angle, voltage, current, powerdemand) = srcObj
            logger.info(f"{timestamp=}, {deviceName=}")
            destTime = timestamp
            destApplication = "PV"
            destDevice = "Powermeter"
            # BUG FIX: build JSON with json.dumps instead of a hand-written
            # f-string, so special characters in status cannot produce
            # malformed documents.
            destAttributes = json.dumps({
                "ApplicationId": "PV",
                "Status": str(status),
                "Hint": "Migrated",
            })
            # (key, unit, value) triples; values are serialized as strings to
            # match the document format produced by the original migration.
            entries = [
                ("Cnt", "", "-1"),
                ("Angle", "degree", angle),
                ("State", "", state),
                ("Factor", "", factor),
                ("Current", "A", current),
                ("Voltage", "V", voltage),
                ("PowerActive", "W", poweractive),
                ("PowerApparent", "VA", powerapparent),
                ("PowerReactive", "VA", powerreactive),
                ("ExportEnergyActive", "Wh", exportenergyactive),
                ("ImportEnergyActive", "Wh", importenergyactive),
                ("PowerDemandReverse", "W", powerdemandreverse),
                ("PowerDemandPositive", "W", powerdemandpositive),
                ("ExportEnergyReactive", "VAh", exportenergyreactive),
                ("ImportEnergyReactive", "VAh", importenergyreactive),
            ]
            destValues = json.dumps({
                key: {"unit": unit, "label": "", "value": str(value), "variable": key}
                for key, unit, value in entries
            })
            logger.info(f"{destTime=}, {destApplication=}, {destDevice=}, {destAttributes=}, {destValues=}")
            try:
                destCur.execute("insert into measurements (time, application, device, attributes, values) values(%s, %s, %s, %s, %s)",
                                (destTime, destApplication, destDevice, destAttributes, destValues))
                destConn.commit()
            except Exception as e:
                # Roll back only this row and continue with the next one.
                destConn.rollback()
                logger.error(f"Error {e} when inserted time {destTime}")
finally:
    if srcConn:
        srcConn.close()
    if destConn:
        destConn.close()

View File

@ -0,0 +1,78 @@
import psycopg2
from loguru import logger
import os
srcPgHost = os.environ["SRC_PGHOST"]
srcPgUser = os.environ["SRC_PGUSER"]
srcPgPassword = os.environ["SRC_PGPASSWORD"]
srcPgDatabase = os.environ["SRC_PGDATABASE"]
destPgHost = os.environ["DEST_PGHOST"]
destPgUser = os.environ["DEST_PGUSER"]
destPgPassword = os.environ["DEST_PGPASSWORD"]
destPgDatabase = os.environ["DEST_PGDATABASE"]
try:
srcConn = psycopg2.connect(
host=srcPgHost,
dbname=srcPgDatabase,
user=srcPgUser,
password=srcPgPassword,
sslmode='require'
)
srcConn.autocommit = False
destConn = psycopg2.connect(
host=destPgHost,
dbname=destPgDatabase,
user=destPgUser,
password=destPgPassword,
sslmode='require'
)
destConn.autocommit = False
with srcConn.cursor() as srcCur, destConn.cursor() as destCur:
srcCur.execute("select time, location, status, temperature, category from room_climate_measurement_t where category = 'heating' and time > '2023-12-19 05:20:00' order by time")
for srcObj in srcCur:
timestamp = srcObj[0]
location = srcObj[1]
status = srcObj[2]
temperature = srcObj[3]
category = srcObj[4]
logger.info(f"{timestamp=}, {location=}, {status=}, {temperature=}, {category=}")
destTime = timestamp
match category:
case 'heating':
destApplication = 'Temperature Heating'
case 'Outdoor':
destApplication = 'Temperature Wago'
case 'Device':
destApplication = 'Temperature Wago'
case 'Indoor':
destApplication = 'Temperature Multisensor' if location != 'Anna-Koeln-2' else 'Temperature Shelly Plus HT'
case 'Special':
destApplication = 'Temperature Multisensor'
destDevice = location
destAttributes = '{"ApplicationId":"temperature-imported", "Status":"' + status + '","Location":"' + location + '","Category":"' + category + '","Hint": "Migrated"}'
destValues = '{"Value": {"unit": "°C", "label": "", "value": "' + str(temperature) + '", "variable": ""}}'
logger.info(f"{destTime=}, {destApplication=}, {destDevice=}, {destAttributes=}, {destValues=}")
try:
destCur.execute("insert into measurements (time, application, device, attributes, values) values(%s, %s, %s, %s, %s)",
(destTime, destApplication, destDevice, destAttributes, destValues))
destConn.commit()
except Exception as e:
destConn.rollback()
logger.error(f"Error {e} when inserted time {destTime}")
finally:
if srcConn:
srcConn.close()
if destConn:
destConn.close()

View File

@ -0,0 +1,2 @@
loguru==0.7.2
psycopg2==2.9.9

8
queries/berresheim.sql Normal file
View File

@ -0,0 +1,8 @@
-- level_v: one row per level-monitoring measurement, with the corrected
-- distance reading exposed as "level" and the battery voltage, both cast
-- from the JSON values column to float; status comes from the attributes.
create or replace view level_v as
select time,
cast(values->'CorrectedDistance'->>'value' as float) as level,
cast(values->'Battery'->>'value' as float) as battery,
attributes->>'Status' as status,
device
from measurements
where application = 'de-hottis-level-monitoring';

View File

@ -23,3 +23,75 @@ create or replace view power_v as
where application = 'Power' and
attributes->>'Status' = 'Ok';
-- temperature_heating_v: heating temperatures (OneWire gateway import).
create or replace view temperature_heating_v as
select time,
cast(values->'Value'->>'value' as float) as temperature,
device
from measurements
where application = 'Temperature Heating';
-- gas_v: gas meter volume readings; only rows flagged Ok are exposed.
create or replace view gas_v as
select time,
cast(values->'Volume'->>'value' as float) as volume,
device
from measurements
where application = 'Gas' and
attributes->>'Status' = 'Ok';
-- temperature_v: room temperatures from multisensors and Shelly Plus HT.
create or replace view temperature_v as
select time,
cast(values->'Value'->>'value' as float) as temperature,
device
from measurements
where application in ('Temperature Multisensor', 'Temperature Shelly Plus HT');
-- temperature2_v: temperatures sampled via the Wago PLC.
create or replace view temperature2_v as
select time,
cast(values->'Value'->>'value' as float) as temperature,
device
from measurements
where application = 'Temperature Wago';
-- humidity_v: relative humidity from the multisensors.
create or replace view humidity_v as
select time,
cast(values->'Value'->>'value' as float) as humidity,
device
from measurements
where application in ('Humidity Multisensor');
-- soil_v: soil sensor (Dragino LSE01) water content, conductance and
-- temperature, selected by device type attribute.
create or replace view soil_v as
select time,
cast(values->'Water'->>'value' as float) as water,
cast(values->'Conductance'->>'value' as float) as conductance,
cast(values->'Temperature'->>'value' as float) as temperature,
device
from measurements
where application = 'de-hottis-app01' and attributes->>'DeviceType' = 'dragino-lse01';
-- co2_v: SCD30 CO2/humidity/temperature readings joined with the devices
-- table to attach the human-readable device label.
create or replace view co2_v as
select time,
cast(m.values->'CO2concentration'->>'value' as float) as co2concentration,
cast(m.values->'Humidity'->>'value' as float) as humidity,
cast(m.values->'Temperature'->>'value' as float) as temperature,
m.device as device,
d.attributes->>'Label' as label
from measurements m, devices d
where m.application = 'de-hottis-app01' and
m.attributes->>'DeviceType' = 'hottis-scd30' and
m.device = d.label;
-- locative_v: geofence enter/leave events; device carries the person name.
create or replace view locative_v as
select time,
device as person,
values->'Location'->>'value' as location,
values->'Trigger'->>'value' as direction
from measurements
where application = 'Locative';
-- router_v: WAN throughput counters polled via SNMP from the router.
create or replace view router_v as
select time,
device,
cast(values->'wan-in'->>'value' as int) as wanInOctetsPerSeconds,
cast(values->'wan-out'->>'value' as int) as wanOutOctetsPerSeconds
from measurements
where application = 'SNMP' and device = '172.16.3.1';

View File

@ -0,0 +1,11 @@
-- Average outdoor temperature around noon for the current day, rounded to
-- whole degrees, keyed by a d.m.yyyy label.
select
extract('day' from time)::varchar || '.' || extract('month' from time)::varchar || '.' || extract('year' from time)::varchar as day,
avg(temperature)::numeric(10,0) as temperature
from room_climate_measurement_t
where
category = 'Outdoor' and
location = 'Outdoor' and
-- restrict to readings taken during the 12 o'clock hour of today
extract('hour' from time) = 12 and
time::date = now()::date
group by day

View File

@ -0,0 +1,73 @@
-- query
-- Computes the PV export energy yield (delta of the exportenergyactive
-- counter) for the current day, month and year as three union'ed rows.
with
first_day_in_year as (
select
date_trunc('day', min(time)) as day
from pv_power_measurement_t
where
-- Fix: the original compared against date_trunc('year', time), which is
-- true for every row and therefore yielded the first day of the whole
-- table instead of the first day of the current year (compare the
-- correct pattern in first_day_in_month below).
time between date_trunc('year', now()) and now()
),
first_value_in_year as (
select
time_bucket('1 day', time) as interval,
first(exportenergyactive, time) as energy
from pv_power_measurement_t
where
time between (select day from first_day_in_year) and (select day from first_day_in_year) + interval '1 day' and
status = 'Ok'
group by interval
),
first_day_in_month as (
select
date_trunc('day', min(time)) as day
from pv_power_measurement_t
where
time between date_trunc('month', now()) and now()
),
first_value_in_month as (
select
time_bucket('1 day', time) as interval,
first(exportenergyactive, time) as energy
from pv_power_measurement_t
where
time between (select day from first_day_in_month) and (select day from first_day_in_month) + interval '1 day' and
status = 'Ok'
group by interval
),
first_value_in_day as (
select
time_bucket('1 day', time) as interval,
first(exportenergyactive, time) as energy
from pv_power_measurement_t
-- NOTE(review): unlike the other CTEs this one does not filter on
-- status = 'Ok' -- confirm whether that is intentional.
where time >= date_trunc('day', now())
group by interval
),
last_value as (
select
time_bucket('1 day', time) as interval,
last(exportenergyactive, time) as energy
from pv_power_measurement_t
where
time between date_trunc('day', now()) and date_trunc('day', now()) + interval '1 day' and
status = 'Ok'
group by interval
)
select
extract(year from (select day from first_day_in_year))::text as period_value,
'Year' as period_name,
round(((select energy from last_value) - (select energy from first_value_in_year))::numeric, 2) as yield
union
select
to_char((select day from first_day_in_month), 'Month') as period_value,
'Month' as period_name,
round(((select energy from last_value) - (select energy from first_value_in_month))::numeric, 2) as yield
union
select
now()::date::text as period_value,
'Day' as period_name,
round(((select energy from last_value) - (select energy from first_value_in_day))::numeric, 2) as yield;
-- output format
-- wn@atuin:~/Workspace/go-workspace/src/universal-data-ingest [main ≡ +0 ~1 -0 !]$ mosquitto_sub -h 172.23.1.102 -v -t IoT/PV/Yields
-- IoT/PV/Yields {"Month":"1.43","Year":"285.39","Day":"0.00"}

14
queries/pg.sql Normal file
View File

@ -0,0 +1,14 @@
-- power_v: per-phase active power and power factor from the Geiger PoC
-- LoRaWAN meter; FPort 1 carries the measurement payload.
create or replace view power_v as
select time,
cast(values->'ActivePowerL1'->>'value' as float) as power_l1,
cast(values->'ActivePowerL2'->>'value' as float) as power_l2,
cast(values->'ActivePowerL3'->>'value' as float) as power_l3,
cast(values->'ActivePowerL123'->>'value' as float) as power_total,
cast(values->'PowerfactorL1'->>'value' as float) as factor_l1,
cast(values->'PowerfactorL2'->>'value' as float) as factor_l2,
cast(values->'PowerfactorL3'->>'value' as float) as factor_l3,
device
from measurements
where application = 'com-passavant-geiger-poc' and
attributes->>'FPort' = '1';

14
src/udi/ENVDB.udiload Normal file
View File

@ -0,0 +1,14 @@
# Exports the PG* environment variables for the udi database of instance $2
# in namespace $1, pulling host and credentials from the cluster.
# Intended to be sourced (it exports variables into the calling shell).
if [ "$1" = "" ]; then
  echo "set namespace as argument"
  # Fix: the original only printed the message and carried on with an empty
  # namespace. Abort: 'return' works when sourced, 'exit' when executed.
  return 1 2>/dev/null || exit 1
fi
N=$1
if [ "$2" = "" ]; then
  echo "set instance as argument"
  return 1 2>/dev/null || exit 1
fi
I=$2
PGHOST=$(kubectl get services traefik -n system -o jsonpath="{.status.loadBalancer.ingress[0].ip}")
PGPASSWORD=$(kubectl get secrets $I-udi-db-cred -n $N -o jsonpath="{.data.PGPASSWORD}" | base64 --decode)
PGUSER=$(kubectl get secrets $I-udi-db-cred -n $N -o jsonpath="{.data.PGUSER}" | base64 --decode)
PGSSLMODE=$(kubectl get secrets $I-udi-db-cred -n $N -o jsonpath="{.data.PGSSLMODE}" | base64 --decode)
PGDATABASE=$(kubectl get secrets $I-udi-db-cred -n $N -o jsonpath="{.data.PGDATABASE}" | base64 --decode)
export PGUSER PGHOST PGPASSWORD PGSSLMODE PGDATABASE

6
src/udi/ENVDB.uditest Normal file
View File

@ -0,0 +1,6 @@
# Exports the PG* environment variables for the fixed "uditest" database.
# Host and password are read from the cluster; user/db/sslmode are static.
PGUSER="uditest"
PGHOST=$(kubectl get services traefik -n system -o jsonpath="{.status.loadBalancer.ingress[0].ip}")
PGPASSWORD=$(kubectl get secrets uditest-db-cred -n udi-test -o jsonpath="{.data.PGPASSWORD}" | base64 --decode)
PGSSLMODE=require
PGDATABASE="uditest"
export PGUSER PGHOST PGPASSWORD PGSSLMODE PGDATABASE

View File

@ -0,0 +1,21 @@
{
"mqtt": {
"broker": "ssl://eu1.cloud.thethings.network:8883",
"username": "de-hottis-lora-test1@ttn",
"tlsEnable": "true"
},
"topicMappings": [
{
"topics": [ "v3/#" ],
"handler": "TTN",
"id": "TTN0",
"config": {
"attributes": {
}
}
}
],
"archiver": {
"dir": "./tmp/udi"
}
}

133
src/udi/config-test.json Normal file
View File

@ -0,0 +1,133 @@
{
"mqtt": {
"broker": "mqtt://172.23.1.102:1883",
"tlsEnable": "false"
},
"topicMappings": [
{
"topics": [ "IoT/PV/Values" ],
"handler": "PV",
"id": "PV",
"config": {
"attributes": {
}
}
},
{
"topics": [ "IoT/MBGW3/Measurement" ],
"handler": "MBGW3",
"id": "MBGW3",
"config": {
"attributes": {
}
}
},
{
"topics": [ "dt1/ai/periodic/1" ],
"handler": "DT1T",
"id": "DT1T.0",
"config": {
"attributes": {
"Application": "Temperature Wago",
"Device": "Freezer",
"HardLow": "-273",
"SoftLow": "-50",
"SoftHigh": "20",
"HardHigh": "100"
}
}
},
{
"topics": [ "dt1/ai/periodic/3" ],
"handler": "DT1T",
"id": "DT1T.1",
"config": {
"attributes": {
"Application": "Temperature Wago",
"Device": "Outdoor",
"HardLow": "-273",
"SoftLow": "-60",
"SoftHigh": "60",
"HardHigh": "100"
}
}
},
{
"topics": [ "IoT/OneWireGW/Bus 1/#" ],
"handler": "SVER",
"id": "SVER0",
"config": {
"databaseConnStr": "",
"attributes": {
"application": "Temperature Heating",
"payloadRegex": "(\\d+(\\.\\d+)?)\\s*([^0-9\\s]\\S*)",
"deviceFrom": "topic",
"devicePart": "3",
"valueFrom": "payload",
"valuePart": "1",
"unitFrom": "payload",
"unitPart": "3"
}
}
},
{
"topics": [ "NR/Multisensor/+/Temperatur" ],
"handler": "SVEJ",
"id": "SVEJ0",
"config": {
"databaseConnStr": "",
"attributes": {
"application": "Temperature Multisensor",
"deviceSelector": "T:2",
"valueSelector": "J:$.CurrentTemperature",
"unitSelector": "C:°C"
}
}
},
{
"topics": [ "NR/Multisensor/+/Feuchte" ],
"handler": "SVEJ",
"id": "SVEJ1",
"config": {
"databaseConnStr": "",
"attributes": {
"application": "Humidity Multisensor",
"deviceSelector": "T:2",
"valueSelector": "J:$.CurrentRelativeHumidity",
"unitSelector": "C:%"
}
}
},
{
"topics": [ "shellyplusht/+/status/temperature:0" ],
"handler": "SVEJ",
"id": "SVEJ2",
"config": {
"databaseConnStr": "",
"attributes": {
"application": "Temperature Shelly Plus HT",
"deviceSelector": "T:1",
"valueSelector": "J:$.tC",
"unitSelector": "C:°C"
}
}
},
{
"topics": [ "shellyplusht/+/status/humidity:0" ],
"handler": "SVEJ",
"id": "SVE4",
"config": {
"databaseConnStr": "",
"attributes": {
"application": "Humidity Shelly Plus HT",
"deviceSelector": "T:1",
"valueSelector": "J:$.rh",
"unitSelector": "C:%"
}
}
}
],
"archiver": {
"dir": "./tmp/udi"
}
}

View File

@ -5,7 +5,6 @@ import "log"
import "os"
type HandlerConfigT struct {
DatabaseConnStr string `json:"databaseConnStr"`
Attributes map[string]string `json:"attributes"`
}
@ -13,14 +12,15 @@ type ConfigT struct {
Mqtt struct {
Broker string `json:"broker"`
Username string `json:"username"`
Password string `json:"password"`
Password string
TlsEnable string `json:"tlsEnable"`
} `json:"mqtt"`
TopicMappings []struct {
Id string `json:"id"`
Topics []string `json:"topics"`
Handler string `json:"handler"`
Config HandlerConfigT `json:"config"`
} `json:"topicMappings"`
Handlers map[string]HandlerConfigT `json:"handlers"`
Archiver struct {
Dir string `json:"dir"`
}
@ -34,5 +34,7 @@ func LoadConfiguration() {
if err != nil {
log.Fatalf("Unable to parse configuration: %s", err)
}
Config.Mqtt.Password = os.Getenv("MQTT_PASSWORD")
}

View File

@ -1,26 +0,0 @@
{
"mqtt": {
"broker": "172.23.1.102:1883",
"username": "",
"password": "",
"tlsEnable": "false"
},
"topicMappings": [
{
"topics": ["IoT/MBGW3/Measurement"],
"handler": "IoT"
}
],
"handlers": [
{
"name": "IoT",
"databaseConnStr": "",
"attributes": {
}
}
],
"archiver": {
"dir": "/mnt/udi/archive"
}
}

View File

@ -0,0 +1,94 @@
package counter
import (
	"encoding/json"
	"log"
	"sync"
	"time"
)
// statsTuple_t holds success/failure counters for one category.
type statsTuple_t struct {
	Successful int `json:"good"`
	Failed     int `json:"bad"`
}

// stats_t aggregates the counters of every pipeline stage plus a
// per-handler map keyed by handler id.
type stats_t struct {
	Received   statsTuple_t            `json:"received"`
	Archived   statsTuple_t            `json:"archived"`
	Dispatched statsTuple_t            `json:"dispatched"`
	Handled    map[string]statsTuple_t `json:"handled"`
	Stored     statsTuple_t            `json:"stored"`
}

var stats stats_t

// statsMutex serializes access to stats: the counting functions are called
// from handler goroutines while the reporter goroutine started by
// InitCounter marshals the struct. Without it the Handled map is read and
// written concurrently, which is a data race and can crash the process.
var statsMutex sync.Mutex

// S counts one successful event for the given pipeline stage.
func S(id string) {
	statsMutex.Lock()
	defer statsMutex.Unlock()
	switch id {
	case "Received":
		stats.Received.Successful += 1
	case "Archived":
		stats.Archived.Successful += 1
	case "Dispatched":
		stats.Dispatched.Successful += 1
	case "Stored":
		stats.Stored.Successful += 1
	default:
		log.Printf("Unknown stats id %s", id)
	}
}

// F counts one failed event for the given pipeline stage.
func F(id string) {
	statsMutex.Lock()
	defer statsMutex.Unlock()
	switch id {
	case "Received":
		stats.Received.Failed += 1
	case "Archived":
		stats.Archived.Failed += 1
	case "Dispatched":
		stats.Dispatched.Failed += 1
	case "Stored":
		stats.Stored.Failed += 1
	default:
		log.Printf("Unknown stats id %s", id)
	}
}

// SH counts one successfully handled message for handler id, creating the
// map entry on first use.
func SH(id string) {
	statsMutex.Lock()
	defer statsMutex.Unlock()
	if tuple, ok := stats.Handled[id]; ok {
		tuple.Successful += 1
		stats.Handled[id] = tuple
	} else {
		stats.Handled[id] = statsTuple_t{Successful: 1, Failed: 0}
	}
}

// FH counts one failed message for handler id, creating the map entry on
// first use.
func FH(id string) {
	statsMutex.Lock()
	defer statsMutex.Unlock()
	if tuple, ok := stats.Handled[id]; ok {
		tuple.Failed += 1
		stats.Handled[id] = tuple
	} else {
		stats.Handled[id] = statsTuple_t{Successful: 0, Failed: 1}
	}
}

// InitCounter resets all counters and starts a goroutine that logs the
// current statistics as one JSON line every 60 seconds.
func InitCounter() {
	statsMutex.Lock()
	stats = stats_t{
		Received:   statsTuple_t{},
		Archived:   statsTuple_t{},
		Dispatched: statsTuple_t{},
		Stored:     statsTuple_t{},
		Handled:    make(map[string]statsTuple_t),
	}
	statsMutex.Unlock()
	go func() {
		for {
			statsMutex.Lock()
			sj, err := json.Marshal(stats)
			statsMutex.Unlock()
			if err != nil {
				log.Printf("Unable to marshal stats object: %s", err)
			}
			log.Println(string(sj))
			time.Sleep(time.Second * 60)
		}
	}()
}

View File

@ -35,8 +35,8 @@ type DeviceType struct {
type Device struct {
gorm.Model
Label string `gorm:"not null"`
ApplicationID int `gorm:"not null"`
Label string `gorm:"not null;uniqueIndex:idx_label_application_id"`
ApplicationID int `gorm:"not null;uniqueIndex:idx_label_application_id"`
Application Application
DeviceTypeID int `gorm:"not null"`
DeviceType DeviceType

View File

@ -4,6 +4,8 @@ package database
import (
"log"
//"time"
"fmt"
"udi/counter"
"gorm.io/driver/postgres"
"gorm.io/gorm"
)
@ -13,33 +15,60 @@ type DatabaseHandle struct {
dbh *gorm.DB
}
func NewDatabaseHandle(dsn string) *DatabaseHandle {
func NewDatabaseHandle() *DatabaseHandle {
var db DatabaseHandle
conn, err := gorm.Open(postgres.Open(dsn))
// inject the whole database configuration via the well-known PG* env variables
conn, err := gorm.Open(postgres.Open(""))
if err != nil {
log.Printf("Unable to open database connection: %s", err)
db.initialized = false
} else {
db.dbh = conn
db.initialized = true
log.Println("Database connection opened")
//log.Println("Database connection opened")
}
return &db
}
func (dbh *DatabaseHandle) StoreMeasurement(measurement *Measurement) {
if ! dbh.initialized {
func (self *DatabaseHandle) StoreMeasurement(measurement *Measurement) {
if ! self.initialized {
log.Printf("Database connection not initialized, can not store, measurement %s lost", measurement)
counter.F("Stored")
return
}
result := dbh.dbh.Create(measurement)
result := self.dbh.Create(measurement)
if result.Error != nil {
log.Printf("Unable to insert, measurement %s lost, error: %s", measurement, result.Error)
counter.F("Stored")
return
}
log.Println("Successfully stored measurement")
//log.Println("Successfully stored measurement")
counter.S("Stored")
}
// GetDeviceByLabelAndApplication looks up a single Device by its label and
// the label of the application it belongs to. The associated Application
// and DeviceType records are eager-loaded onto the returned Device.
// Returns an error when the handle was never initialized or the query
// (including the not-found case) fails.
func (self *DatabaseHandle) GetDeviceByLabelAndApplication(applicationLabel string, deviceLabel string) (*Device, error) {
	// Guard: NewDatabaseHandle may have failed to open the connection.
	if ! self.initialized {
		err := fmt.Errorf("Database connection not initialized")
		return nil, err
	}
	var device Device
	// Join against applications so the device label only has to be unique
	// per application, not globally.
	result := self.dbh.
		Preload("Application").
		Preload("DeviceType").
		Joins("JOIN applications ON devices.application_id = applications.id").
		Where("devices.label = ? AND applications.label = ?", deviceLabel, applicationLabel).
		First(&device)
	if result.Error != nil {
		err := fmt.Errorf("Query failed: %s", result.Error)
		return nil, err
	}
	return &device, nil
}

View File

@ -7,31 +7,71 @@ import "fmt"
import "net/url"
import "udi/mqtt"
import "udi/config"
import "udi/counter"
import "udi/handlers/handler"
import "udi/handlers/ttn"
import "udi/handlers/iot"
import "udi/handlers/pv"
import "udi/handlers/mbgw3"
import "udi/handlers/sve"
import "udi/handlers/sver"
import "udi/handlers/svej"
import "udi/handlers/dt1t"
import "udi/handlers/locative"
import "udi/handlers/snmp"
var handlerMap map[string]handler.Handler = make(map[string]handler.Handler)
var archiverChannel chan handler.MessageT = make(chan handler.MessageT, 100)
func InitDispatcher() {
log.Printf("Initializing dispatcher")
log.Printf("Dispatcher initializing")
go archiver()
handlerMap["TTN"] = ttn.NewTTNHandler()
handlerMap["IoT"] = iot.NewIoTHandler()
handlerMap["PV"] = pv.NewPvHandler(config.Config.Handlers["PV"])
handlerMap["MBGW3"] = mbgw3.NewMbgw3Handler(config.Config.Handlers["MBGW3"])
handlerMap["SVE"] = sve.NewSveHandler(config.Config.Handlers["SVE"])
for _, mapping := range config.Config.TopicMappings {
// log.Printf("Trying to initialize %s", mapping)
var factory interface{}
switch mapping.Handler {
case "TTN":
factory = ttn.New
case "IoT":
factory = iot.New
case "PV":
factory = pv.New
case "MBGW3":
factory = mbgw3.New
case "SVER":
factory = sver.New
case "SVEJ":
factory = svej.New
case "DT1T":
factory = dt1t.New
case "Locative":
factory = locative.New
case "SNMP":
factory = snmp.New
default:
factory = nil
log.Printf("No handler %s found, ignore mapping", mapping.Handler)
}
fn, ok := factory.(func(string, config.HandlerConfigT) handler.Handler)
if ! ok {
log.Println("Typ Assertion failed")
break
}
handler := fn(mapping.Id, mapping.Config)
handlerMap[mapping.Id] = handler
}
//log.Printf("handlerMap: %s", handlerMap)
}
func storeMessage(filename string, item handler.MessageT) {
file, err := os.OpenFile(filename, os.O_APPEND | os.O_CREATE | os.O_WRONLY, 0644)
if err != nil {
log.Printf("Unable to open archiving file %s, message is not archived: %s", filename, err)
counter.F("Archived")
return
}
defer file.Close()
@ -39,9 +79,11 @@ func storeMessage(filename string, item handler.MessageT) {
_, err = file.WriteString(string(archivingString) + "\n")
if err != nil {
log.Printf("Unable to write message, message is not archived: %s", err)
counter.F("Archived")
return
}
log.Println("Successfully archived message")
//log.Println("Successfully archived message")
counter.S("Archived")
}
func archiver() {
@ -57,9 +99,10 @@ func archiver() {
err := os.MkdirAll(currentArchivingDir, 0755)
if err != nil {
log.Printf("Unable to create archiving dir %s: %s", currentArchivingDir, err)
counter.F("Archived")
}
lastArchivingDir = currentArchivingDir
log.Printf("Archiving dir %s created", currentArchivingDir)
//log.Printf("Archiving dir %s created", currentArchivingDir)
}
archivingFilename := fmt.Sprintf("%s/%s", currentArchivingDir, url.PathEscape(message.Topic))
storeMessage(archivingFilename, message)
@ -71,7 +114,7 @@ func InputDispatcher() {
for {
select {
case mqttMessage := <- mqtt.InputChannel:
log.Printf("Message arrived in inputDispatcher, topic: %s\n", mqttMessage.Topic)
//log.Printf("Message arrived in inputDispatcher, topic: %s\n", mqttMessage.Topic)
message := handler.MessageT { time.Now(), mqttMessage.Topic, string(mqttMessage.Payload) }
archiverChannel <- message
handleMessage(message)
@ -85,16 +128,19 @@ func handleMessage(message handler.MessageT) {
for _, subscribedTopic := range mapping.Topics {
// log.Printf("Testing %s in %s", message.Topic, subscribedTopic)
if mqtt.TopicMatchesSubscription(message.Topic, subscribedTopic) {
log.Printf("Handle message in handler %s", mapping.Handler)
handler, exists := handlerMap[mapping.Handler]
//log.Printf("Handle message in handler %s", mapping.Id)
handler, exists := handlerMap[mapping.Id]
if exists {
handler.Handle(message)
counter.S("Dispatched")
return
} else {
log.Printf("Handler %s not found, message %s is lost", mapping.Handler, message)
log.Printf("Handler %s not found, message %s is lost", mapping.Id, message)
counter.F("Dispatched")
}
}
}
}
log.Printf("No match for topic %s, message %s is lost", message.Topic, message)
counter.F("Dispatched")
}

View File

@ -5,6 +5,7 @@ go 1.21.3
require (
github.com/eclipse/paho.mqtt.golang v1.4.3
github.com/google/uuid v1.4.0
github.com/oliveagle/jsonpath v0.0.0-20180606110733-2e52cf6e6852
gorm.io/driver/postgres v1.5.4
gorm.io/gorm v1.25.5
)
@ -16,8 +17,8 @@ require (
github.com/jackc/pgx/v5 v5.4.3 // indirect
github.com/jinzhu/inflection v1.0.0 // indirect
github.com/jinzhu/now v1.1.5 // indirect
golang.org/x/crypto v0.14.0 // indirect
golang.org/x/net v0.10.0 // indirect
golang.org/x/crypto v0.19.0 // indirect
golang.org/x/net v0.20.0 // indirect
golang.org/x/sync v0.1.0 // indirect
golang.org/x/text v0.13.0 // indirect
golang.org/x/text v0.14.0 // indirect
)

View File

@ -17,6 +17,8 @@ github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD
github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc=
github.com/jinzhu/now v1.1.5 h1:/o9tlHleP7gOFmsnYNz3RGnqzefHA47wQpKrrdTIwXQ=
github.com/jinzhu/now v1.1.5/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8=
github.com/oliveagle/jsonpath v0.0.0-20180606110733-2e52cf6e6852 h1:Yl0tPBa8QPjGmesFh1D0rDy+q1Twx6FyU7VWHi8wZbI=
github.com/oliveagle/jsonpath v0.0.0-20180606110733-2e52cf6e6852/go.mod h1:eqOVx5Vwu4gd2mmMZvVZsgIqNSaW3xxRThUJ0k/TPk4=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
@ -26,12 +28,18 @@ github.com/stretchr/testify v1.8.1 h1:w7B6lhMri9wdJUVmEZPGGhZzrYTPvgJArz7wNPgYKs
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
golang.org/x/crypto v0.14.0 h1:wBqGXzWJW6m1XrIKlAH0Hs1JJ7+9KBwnIO8v66Q9cHc=
golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4=
golang.org/x/crypto v0.19.0 h1:ENy+Az/9Y1vSrlrvBSyna3PITt4tiZLf7sgCjZBX7Wo=
golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU=
golang.org/x/net v0.10.0 h1:X2//UzNDwYmtCLn7To6G58Wr6f5ahEAQgKNzv9Y951M=
golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
golang.org/x/net v0.20.0 h1:aCL9BSgETF1k+blQaYUBx9hJ9LOGP3gAVemcZlf1Kpo=
golang.org/x/net v0.20.0/go.mod h1:z8BVo6PvndSri0LbOE3hAn0apkU+1YvI6E70E9jsnvY=
golang.org/x/sync v0.1.0 h1:wsuoTGHzEhffawBOhz5CYhcrV4IdKZbEyZjBMuTp12o=
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/text v0.13.0 h1:ablQoSUd0tRdKxZewP80B+BaqeKJuVhuRxj/dkrun3k=
golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ=
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=

View File

@ -0,0 +1,80 @@
package dt1t
import (
"log"
"fmt"
"time"
"strconv"
"udi/handlers/handler"
"udi/database"
"udi/config"
)
type Dt1tHandler struct {
handler.CommonHandler
ready bool
label string
dbh *database.DatabaseHandle
application string
device string
}
// New creates a Dt1tHandler for the given mapping id. The handler requires
// the config attributes "Application" and "Device"; when either is missing
// it is returned not-ready (Handle will reject all messages).
func New(id string, config config.HandlerConfigT) handler.Handler {
	t := &Dt1tHandler{}
	// Fix: assign the id before validation so that even a misconfigured
	// handler reports its real id in logs and statistics (the original left
	// Id empty on the early-return paths).
	t.Id = id
	if config.Attributes["Application"] == "" {
		log.Println("Error: application not configured")
		return t
	}
	t.application = config.Attributes["Application"]
	if config.Attributes["Device"] == "" {
		log.Println("Error: device not configured")
		return t
	}
	t.device = config.Attributes["Device"]
	t.dbh = database.NewDatabaseHandle()
	t.ready = true
	return t
}
// Handle parses a raw integer temperature reading (tenths of a degree,
// 16-bit two's complement, delivered as decimal text) and stores it as a
// measurement under the configured application/device.
func (self *Dt1tHandler) Handle(message handler.MessageT) {
	if ! self.ready {
		self.Lost("Handler is not marked as ready", nil, message)
		return
	}
	// log.Printf("Handler DT1T %d processing %s -> %s", self.id, message.Topic, message.Payload)
	temperature, err := strconv.Atoi(message.Payload)
	if err != nil {
		self.Lost("Invalid raw value", err, message)
		return
	}
	// Reconstruct the sign: if bit 15 is set the value is a 16-bit
	// two's-complement negative number; undo the complement manually.
	// NOTE(review): assumes the payload never exceeds 16 significant bits —
	// values above 0xffff would be mangled here; confirm against the sender.
	if temperature & 0x8000 != 0{
		temperature = ((temperature - 1) ^ 0xffff) * -1
	}
	// Raw value is in tenths of a degree Celsius.
	temperatureF := float32(temperature) / 10.0
	var measurement database.Measurement
	measurement.Time = time.Now()
	measurement.Application = self.application
	measurement.Device = self.device
	var variable database.VariableType
	variable.Label = "Temperature"
	variable.Variable = ""
	variable.Unit = "°C"
	variable.Value = fmt.Sprintf("%f", temperatureF)
	measurement.Values = make(map[string]database.VariableType)
	measurement.Values["Value"] = variable
	// log.Printf("Prepared measurement item: %s", measurement)
	self.dbh.StoreMeasurement(&measurement)
	self.S()
}

View File

@ -1,6 +1,10 @@
package handler
import "time"
import (
"time"
"log"
"udi/counter"
)
type MessageT struct {
Timestamp time.Time
@ -9,6 +13,34 @@ type MessageT struct {
}
type Handler interface {
GetId() string
Handle(MessageT)
Lost(msg string, err error, message MessageT)
S()
F()
}
// CommonHandler carries the per-handler state and helper methods shared by
// all message handlers; concrete handlers embed it.
type CommonHandler struct {
	Id string
}

// S counts one successfully handled message for this handler.
func (self *CommonHandler) S() {
	counter.SH(self.Id)
}

// F counts one failed message for this handler.
func (self *CommonHandler) F() {
	counter.FH(self.Id)
}

// GetId returns the handler instance id.
func (self *CommonHandler) GetId() string {
	return self.Id
}

// Lost logs a message that could not be processed and counts it as failed.
// err may be nil and is only included in the log line when present.
func (self *CommonHandler) Lost(msg string, err error, message MessageT) {
	// Fix: the original inverted this check, printing the error only when
	// it was nil and omitting it when one was actually present.
	if err != nil {
		log.Printf("Error: %s (%s), message %s is lost", msg, err, message)
	} else {
		log.Printf("Error: %s, message %s is lost", msg, message)
	}
	self.F()
}

View File

@ -1,24 +1,23 @@
package iot
import "log"
import "udi/config"
import "udi/handlers/handler"
var idSeq int = 0
type IoTHandler struct {
id int
handler.CommonHandler
}
func NewIoTHandler() *IoTHandler {
func New(id string, config config.HandlerConfigT) handler.Handler {
t := &IoTHandler {
id: idSeq,
}
idSeq += 1
t.Id = id
return t
}
func (self *IoTHandler) Handle(message handler.MessageT) {
log.Printf("Handler IoT %d processing %s -> %s", self.id, message.Topic, message.Payload)
log.Printf("Handler IoT %d processing %s -> %s", self.Id, message.Topic, message.Payload)
}

View File

@ -0,0 +1,73 @@
package locative
import (
"reflect"
"time"
"log"
"encoding/json"
"udi/config"
"udi/handlers/handler"
"udi/database"
)
// LocativeHandler stores Locative geofence events as measurements.
type LocativeHandler struct {
	handler.CommonHandler
	dbh *database.DatabaseHandle
}

// locativeEvent mirrors the JSON payload posted by the Locative app.
// All fields arrive as strings.
type locativeEvent struct {
	Trigger string `json:"trigger"`
	Device string `json:"device"`
	Location string `json:"location"`
	Latitude string `json:"latitude"`
	Longitude string `json:"longitude"`
	Person string `json:"person"`
	Timestamp string `json:"timestamp"`
}
// New builds a LocativeHandler bound to the given mapping id and opens its
// database handle. The handler config carries no further attributes.
func New(id string, config config.HandlerConfigT) handler.Handler {
	instance := &LocativeHandler{}
	instance.Id = id
	instance.dbh = database.NewDatabaseHandle()
	return instance
}
// Handle parses a Locative geofence event and stores all of its fields as
// one measurement; the person becomes the measurement's device.
func (self *LocativeHandler) Handle(message handler.MessageT) {
	// Fix: Id is a string, so use %s (the original's %d rendered as
	// %!d(string=...)).
	log.Printf("Handler Locative %s processing %s -> %s", self.Id, message.Topic, message.Payload)
	var locativeEvent locativeEvent
	err := json.Unmarshal([]byte(message.Payload), &locativeEvent)
	if err != nil {
		self.Lost("Unable to parse payload into locativeEvent struct", err, message)
		return
	}
	// Mirror every struct field into a VariableType entry via reflection so
	// new payload fields are picked up without touching this loop.
	variables := make(map[string]database.VariableType)
	locativeEventStructValue := reflect.ValueOf(locativeEvent)
	for i := 0; i < locativeEventStructValue.NumField(); i++ {
		field := locativeEventStructValue.Type().Field(i)
		fieldValue := locativeEventStructValue.Field(i)
		v := database.VariableType{
			Label:    "",
			Variable: field.Name,
			Unit:     "",
			Value:    fieldValue.Interface(),
		}
		variables[field.Name] = v
	}
	measurement := database.Measurement{
		Time:        time.Now(),
		Application: "Locative",
		Device:      locativeEvent.Person,
		Values:      variables,
	}
	self.dbh.StoreMeasurement(&measurement)
	self.S()
}

View File

@ -1,8 +1,6 @@
package mbgw3
import (
"log"
//"reflect"
"time"
"strconv"
"encoding/json"
@ -12,10 +10,8 @@ import (
)
var idSeq int = 0
type Mbgw3Handler struct {
id int
handler.CommonHandler
dbh *database.DatabaseHandle
}
@ -30,22 +26,21 @@ type Observation struct {
}
func NewMbgw3Handler(config config.HandlerConfigT) *Mbgw3Handler {
func New(id string, config config.HandlerConfigT) handler.Handler {
t := &Mbgw3Handler {
id: idSeq,
}
idSeq += 1
t.dbh = database.NewDatabaseHandle(config.DatabaseConnStr)
t.Id = id
t.dbh = database.NewDatabaseHandle()
return t
}
func (self *Mbgw3Handler) Handle(message handler.MessageT) {
// log.Printf("Handler MBGW3 %d processing %s -> %s", self.id, message.Topic, message.Payload)
//log.Printf("Handler MBGW3 %d processing %s -> %s", self.Id, message.Topic, message.Payload)
var observation Observation
err := json.Unmarshal([]byte(message.Payload), &observation)
if err != nil {
log.Printf("Unable to parse payload into Observation struct, message %s -> %s is lost, error ", message.Topic, message.Payload, err)
self.Lost("Unable to parse payload into Observation struct", err, message)
return
}
@ -90,9 +85,10 @@ func (self *Mbgw3Handler) Handle(message handler.MessageT) {
}
}
// log.Printf("Prepared measurement item: %s", measurement)
//log.Printf("Prepared measurement item: %s", measurement)
self.dbh.StoreMeasurement(&measurement)
self.S()
}

View File

@ -1,7 +1,6 @@
package pv
import (
"log"
"reflect"
"time"
"encoding/json"
@ -11,10 +10,8 @@ import (
)
var idSeq int = 0
type PvHandler struct {
id int
handler.CommonHandler
dbh *database.DatabaseHandle
}
@ -39,22 +36,22 @@ type PvValue struct {
}
func NewPvHandler(config config.HandlerConfigT) *PvHandler {
func New(id string, config config.HandlerConfigT) handler.Handler {
t := &PvHandler {
id: idSeq,
}
idSeq += 1
t.dbh = database.NewDatabaseHandle(config.DatabaseConnStr)
t.Id = id
t.dbh = database.NewDatabaseHandle()
return t
}
func (self *PvHandler) Handle(message handler.MessageT) {
//log.Printf("Handler PV %d processing %s -> %s", self.id, message.Topic, message.Payload)
var pvValue PvValue
err := json.Unmarshal([]byte(message.Payload), &pvValue)
if err != nil {
log.Printf("Unable to parse payload into pvValue struct, message %s -> %s is lost, error: %s", message.Topic, message.Payload, err)
self.Lost("Unable to parse payload into pvValue struct", err, message)
return
}
@ -80,6 +77,7 @@ func (self *PvHandler) Handle(message handler.MessageT) {
}
self.dbh.StoreMeasurement(&measurement)
self.S()
}

View File

@ -0,0 +1,75 @@
package snmp
import (
"time"
"log"
"encoding/json"
"udi/config"
"udi/handlers/handler"
"udi/database"
)
// SnmpHandler stores SNMP poll results received via MQTT as measurements.
type SnmpHandler struct {
	handler.CommonHandler
	dbh *database.DatabaseHandle
}

// endpoint_t is one polled variable as delivered in the MQTT payload.
type endpoint_t struct {
	Label string `json:"label"`
	Variable string `json:"variable"`
	Value string `json:"value"`
}

// observation_t is the payload root: one device with its set of named
// variables.
type observation_t struct {
	Device string `json:"device"`
	Label string `json:"label"`
	Variables map[string]endpoint_t `json:"variables"`
}
// New creates an SnmpHandler with the given handler id and a fresh database
// handle. The config argument is accepted for interface uniformity and is
// currently unused.
func New(id string, config config.HandlerConfigT) handler.Handler {
	h := &SnmpHandler{}
	h.Id = id
	h.dbh = database.NewDatabaseHandle()
	return h
}
// Handle parses an SNMP observation JSON payload and stores it as a
// measurement. Unparsable messages are reported via Lost and dropped.
func (self *SnmpHandler) Handle(message handler.MessageT) {
	// self.Id is a string (assigned from the id parameter of New); the
	// original %d verb rendered it as %!d(string=...). %s is correct.
	log.Printf("Handler SNMP %s processing %s -> %s", self.Id, message.Topic, message.Payload)
	var observation observation_t
	err := json.Unmarshal([]byte(message.Payload), &observation)
	if err != nil {
		self.Lost("Unable to parse payload into Observation struct", err, message)
		return
	}
	var measurement database.Measurement
	measurement.Time = time.Now()
	measurement.Application = "SNMP"
	measurement.Device = observation.Device
	// Keep the poller-supplied label as a measurement attribute.
	measurement.Attributes = map[string]interface{}{
		"Label": observation.Label,
	}
	measurement.Values = make(map[string]database.VariableType)
	for k, v := range observation.Variables {
		measurement.Values[k] = database.VariableType{
			Label:    v.Label,
			Variable: v.Variable,
			Unit:     "",
			Value:    v.Value,
		}
	}
	// measurement is a struct; %v (not %s) formats all field types cleanly.
	log.Printf("Prepared measurement item: %v", measurement)
	self.dbh.StoreMeasurement(&measurement)
	self.S()
}

View File

@ -0,0 +1,159 @@
package svej
import (
"log"
"time"
"strconv"
"strings"
"fmt"
"github.com/oliveagle/jsonpath"
"encoding/json"
"udi/config"
"udi/handlers/handler"
"udi/database"
)
// SingleValueExtractorJsonpathHandler extracts a single value, its unit and
// the device name from each incoming message, driven by configurable
// selectors (jsonpath expression, topic part index, or constant), and stores
// the result as a one-variable measurement.
type SingleValueExtractorJsonpathHandler struct {
handler.CommonHandler
ready bool // false until New validated the configuration; Handle drops messages while false
application string // value used for Measurement.Application
deviceSelector string
valueSelector string
unitSelector string
deviceJsonpath *jsonpath.Compiled // compiled only for "J:" selectors, otherwise nil
valueJsonpath *jsonpath.Compiled
unitJsonpath *jsonpath.Compiled
dbh *database.DatabaseHandle
}
/*
Valid values for selectors:
J:JsonpathExpression
T:TopicPartIndex
C:ConstantValue
*/
// New creates a SingleValueExtractorJsonpathHandler from the handler
// configuration. Selectors take the form "J:<jsonpath>", "T:<topic part
// index>" or "C:<constant>"; jsonpath selectors are compiled here. On any
// configuration error the handler is returned with ready == false, so Handle
// will drop all messages instead of operating on a half-built handler.
func New(id string, config config.HandlerConfigT) handler.Handler {
	t := &SingleValueExtractorJsonpathHandler{
		ready: false,
	}
	if config.Attributes["application"] == "" {
		log.Println("Error: application not configured")
		return t
	}
	t.application = config.Attributes["application"]
	t.deviceSelector = config.Attributes["deviceSelector"]
	// strings.HasPrefix is safe on selectors shorter than two characters,
	// where the original slice expression selector[:2] panicked (e.g. on an
	// empty or one-character attribute).
	if strings.HasPrefix(t.deviceSelector, "J:") {
		jp, err := jsonpath.Compile(t.deviceSelector[2:])
		if err != nil {
			log.Printf("Unable to compile deviceJsonpath: %s, %s", t.deviceSelector[2:], err)
			return t
		}
		t.deviceJsonpath = jp
	}
	t.valueSelector = config.Attributes["valueSelector"]
	if strings.HasPrefix(t.valueSelector, "J:") {
		jp, err := jsonpath.Compile(t.valueSelector[2:])
		if err != nil {
			log.Printf("Unable to compile valueJsonpath: %s, %s", t.valueSelector[2:], err)
			return t
		}
		t.valueJsonpath = jp
	}
	t.unitSelector = config.Attributes["unitSelector"]
	if strings.HasPrefix(t.unitSelector, "J:") {
		jp, err := jsonpath.Compile(t.unitSelector[2:])
		if err != nil {
			log.Printf("Unable to compile unitJsonpath: %s, %s", t.unitSelector[2:], err)
			return t
		}
		t.unitJsonpath = jp
	}
	t.Id = id
	t.ready = true
	t.dbh = database.NewDatabaseHandle()
	return t
}
// extractionHelper resolves one selector against the message context and
// returns the extracted string.
//
//	J:<jsonpath>  evaluate the precompiled jsonpath jp against jPayload
//	T:<index>     take the subtopic at the given index
//	C:<constant>  return the constant verbatim
func extractionHelper(subTopics []string, jPayload interface{}, selector string, jp *jsonpath.Compiled) (string, error) {
	// Selectors shorter than the two-character prefix would make
	// selector[:2] panic below; reject them explicitly.
	if len(selector) < 2 {
		return "", fmt.Errorf("Invalid selector: %s", selector)
	}
	var res string
	switch selector[:2] {
	case "J:":
		// Defensive: New only leaves jp nil when compilation failed (and
		// then marks the handler not ready); guard against a nil deref anyway.
		if jp == nil {
			return "", fmt.Errorf("no compiled jsonpath for selector %s", selector)
		}
		r, e := jp.Lookup(jPayload)
		if e != nil {
			return "", fmt.Errorf("jp.Lookup failed with %s", e)
		}
		res = fmt.Sprint(r)
	case "T:":
		i, e := strconv.Atoi(selector[2:])
		if e != nil {
			return "", fmt.Errorf("Atoi failed with %s", e)
		}
		// The original only checked the upper bound; a negative index from
		// Atoi would have panicked on subTopics[i].
		if i < 0 || i >= len(subTopics) {
			return "", fmt.Errorf("not enough subtopics")
		}
		res = subTopics[i]
	case "C:":
		res = selector[2:]
	default:
		return "", fmt.Errorf("Invalid selector: %s", selector[:2])
	}
	return res, nil
}
// Handle extracts device, value and unit from the incoming message according
// to the configured selectors and stores the result as a measurement with a
// single "Value" entry. Any extraction failure reports the message via Lost.
func (self *SingleValueExtractorJsonpathHandler) Handle(message handler.MessageT) {
	if !self.ready {
		self.Lost("Handler is not marked as ready", nil, message)
		return
	}
	var payload interface{}
	if err := json.Unmarshal([]byte(message.Payload), &payload); err != nil {
		self.Lost("Unable to unmarshal payload", err, message)
		return
	}
	topicParts := strings.Split(message.Topic, "/")
	device, err := extractionHelper(topicParts, payload, self.deviceSelector, self.deviceJsonpath)
	if err != nil {
		self.Lost("Device extraction failed", err, message)
		return
	}
	value, err := extractionHelper(topicParts, payload, self.valueSelector, self.valueJsonpath)
	if err != nil {
		self.Lost("Value extraction failed", err, message)
		return
	}
	unit, err := extractionHelper(topicParts, payload, self.unitSelector, self.unitJsonpath)
	if err != nil {
		self.Lost("Unit extraction failed", err, message)
		return
	}
	var measurement database.Measurement
	measurement.Time = time.Now()
	measurement.Application = self.application
	measurement.Device = device
	measurement.Values = map[string]database.VariableType{
		// Label and Variable are intentionally left empty, as before.
		"Value": {
			Label:    "",
			Variable: "",
			Unit:     unit,
			Value:    value,
		},
	}
	self.dbh.StoreMeasurement(&measurement)
	self.S()
}

View File

@ -1,20 +1,19 @@
package sve
package sver
import (
"log"
"time"
"strconv"
"strings"
"regexp"
"log"
"udi/config"
"udi/handlers/handler"
"udi/database"
)
var idSeq int = 0
type SingleValueExtractorHandler struct {
id int
type SingleValueExtractorRegexHandler struct {
handler.CommonHandler
ready bool
config localConfig
payloadRegex *regexp.Regexp
@ -23,6 +22,7 @@ type SingleValueExtractorHandler struct {
const TOPIC_SEL = "topic"
const PAYLOAD_SEL = "payload"
const PAYLOAD_FULL_SEL = "payload-full"
const CONSTANT_SEL = "constant"
type localConfig struct {
@ -38,13 +38,11 @@ type localConfig struct {
}
func NewSveHandler(config config.HandlerConfigT) *SingleValueExtractorHandler {
t := &SingleValueExtractorHandler {
id: idSeq,
func New(id string, config config.HandlerConfigT) handler.Handler {
t := &SingleValueExtractorRegexHandler {
ready: false,
}
idSeq += 1
var localConfig localConfig
if config.Attributes["application"] == "" {
log.Println("Error: application not configured")
@ -75,49 +73,50 @@ func NewSveHandler(config config.HandlerConfigT) *SingleValueExtractorHandler {
// empty device is valid
localConfig.device = config.Attributes["device"]
if config.Attributes["valueFrom"] != TOPIC_SEL && config.Attributes["valueFrom"] != PAYLOAD_SEL {
if config.Attributes["valueFrom"] != PAYLOAD_SEL && config.Attributes["valueFrom"] != PAYLOAD_FULL_SEL {
log.Printf("Error: invalid value %s for valueFrom", config.Attributes["valueFrom"])
return t
}
localConfig.valueFrom = config.Attributes["valueFrom"]
valuePart, err2 := strconv.Atoi(config.Attributes["valuePart"])
if err2 != nil {
log.Printf("Error: unable to convert valuePart to number: %s", err2)
return t
if config.Attributes["valueFrom"] == PAYLOAD_SEL {
valuePart, err2 := strconv.Atoi(config.Attributes["valuePart"])
if err2 != nil {
log.Printf("Error: unable to convert valuePart to number: %s", err2)
return t
}
localConfig.valuePart = valuePart
}
localConfig.valuePart = valuePart
if config.Attributes["unitFrom"] != TOPIC_SEL && config.Attributes["unitFrom"] != PAYLOAD_SEL && config.Attributes["unitFrom"] != CONSTANT_SEL {
if config.Attributes["unitFrom"] != PAYLOAD_SEL && config.Attributes["unitFrom"] != CONSTANT_SEL {
log.Printf("Error: invalid value %s for unitFrom", config.Attributes["unitFrom"])
return t
}
localConfig.unitFrom = config.Attributes["unitFrom"]
unitPart, err3 := strconv.Atoi(config.Attributes["unitPart"])
if err3 != nil {
log.Printf("Error: unable to convert unitPart to number: %s", err3)
return t
if config.Attributes["unitFrom"] == PAYLOAD_SEL {
unitPart, err3 := strconv.Atoi(config.Attributes["unitPart"])
if err3 != nil {
log.Printf("Error: unable to convert unitPart to number: %s", err3)
return t
}
localConfig.unitPart = unitPart
}
localConfig.unitPart = unitPart
// empty unit is valid
localConfig.unit = config.Attributes["unit"]
t.config = localConfig
t.Id = id
t.ready = true
t.dbh = database.NewDatabaseHandle(config.DatabaseConnStr)
t.dbh = database.NewDatabaseHandle()
return t
}
func lost(msg string, message handler.MessageT) {
log.Printf("Error: %s, message %s is lost", msg, message)
}
func (self *SingleValueExtractorHandler) Handle(message handler.MessageT) {
func (self *SingleValueExtractorRegexHandler) Handle(message handler.MessageT) {
if ! self.ready {
log.Println("Handler is not marked as ready, message %s is lost", message)
self.Lost("Handler is not marked as ready", nil, message)
return
}
//log.Printf("Handler SingleValueExtractor %d processing %s -> %s", self.id, message.Topic, message.Payload)
@ -138,17 +137,17 @@ func (self *SingleValueExtractorHandler) Handle(message handler.MessageT) {
switch self.config.deviceFrom {
case TOPIC_SEL:
if self.config.devicePart >= len(subTopics) {
lost("devicePart out of range", message)
self.Lost("devicePart out of range", nil, message)
return
}
measurement.Device = subTopics[self.config.devicePart]
case PAYLOAD_SEL:
if self.payloadRegex == nil {
lost("no payloadRegex defined, devicePart can't be used", message)
self.Lost("no payloadRegex defined, devicePart can't be used", nil, message)
return
}
if self.config.devicePart >= len(subTopics) {
lost("devicePart out of range", message)
if self.config.devicePart >= len(payloadMatches) {
self.Lost("devicePart out of range", nil, message)
return
}
measurement.Device = payloadMatches[self.config.devicePart]
@ -162,38 +161,28 @@ func (self *SingleValueExtractorHandler) Handle(message handler.MessageT) {
variable.Variable = ""
switch self.config.valueFrom {
case TOPIC_SEL:
if self.config.valuePart >= len(subTopics) {
lost("valuePart out of range", message)
return
}
variable.Value = subTopics[self.config.valuePart]
case PAYLOAD_SEL:
if self.payloadRegex == nil {
lost("no payloadRegex defined, valuePart can't be used", message)
self.Lost("no payloadRegex defined, valuePart can't be used", nil, message)
return
}
if self.config.valuePart >= len(subTopics) {
lost("valuePart out of range", message)
if self.config.valuePart >= len(payloadMatches) {
self.Lost("valuePart out of range", nil, message)
return
}
variable.Value = payloadMatches[self.config.valuePart]
case PAYLOAD_FULL_SEL:
variable.Value = message.Payload
}
switch self.config.unitFrom {
case TOPIC_SEL:
if self.config.unitPart >= len(subTopics) {
lost("unitPart out of range", message)
return
}
variable.Unit = subTopics[self.config.unitPart]
case PAYLOAD_SEL:
if self.payloadRegex == nil {
lost("no payloadRegex defined, unitPart can't be used", message)
self.Lost("no payloadRegex defined, unitPart can't be used", nil, message)
return
}
if self.config.unitPart >= len(subTopics) {
lost("unitPart out of range", message)
if self.config.unitPart >= len(payloadMatches) {
self.Lost("unitPart out of range", nil, message)
return
}
variable.Unit = payloadMatches[self.config.unitPart]
@ -205,5 +194,6 @@ func (self *SingleValueExtractorHandler) Handle(message handler.MessageT) {
//log.Printf("Prepared measurement item: %s", measurement)
self.dbh.StoreMeasurement(&measurement)
self.S()
}

View File

@ -0,0 +1,89 @@
package draginoLdds75
import (
"fmt"
// "log"
"strings"
"strconv"
"encoding/json"
"udi/database"
)
/*
"decoded_payload": {
"Bat": 3.299,
"Distance": "352 mm",
"Interrupt_flag": 0,
"Sensor_flag": 1,
"TempC_DS18B20": "0.00"
},
*/
// message mirrors the TTN decoded_payload of a Dragino LDDS75 (see the
// sample in the comment above). Note that Distance arrives as a string like
// "352 mm", not as a number.
type message struct {
Bat float32 `json:"Bat"`
Distance string `json:"Distance"`
Interrupt_flag int `json:"Interrupt_flag"`
Sensor_flag int `json:"Sensor_flag"`
TempC_DS18B20 string `json:"TempC_DS18B20"`
}
// Parse decodes a Dragino LDDS75 distance-sensor uplink (fPort 2 only). It
// stores battery voltage and distance (mm), derives a Status attribute from
// the sensor's sentinel distance values, and — when the device carries a
// numeric "GroundLevel" attribute — also stores the ground-level-corrected
// distance.
func Parse(fPort int, decodedPayload []byte, _ string, variables *map[string]database.VariableType, attributes *map[string]interface{}, device *database.Device) error {
	if fPort != 2 {
		return fmt.Errorf("Unexpected fPort %d", fPort)
	}
	var message message
	err := json.Unmarshal(decodedPayload, &message)
	if err != nil {
		return fmt.Errorf("Unable to parse payload, fPort %d, error %s", fPort, err)
	}
	// Distance arrives as "<number> mm". The original condition used &&,
	// which both accepted malformed values and — because && short-circuits
	// into the second operand exactly when len != 2 — indexed out of range
	// on a string without a space. || rejects every malformed case safely.
	distanceParts := strings.Split(message.Distance, " ")
	if len(distanceParts) != 2 || distanceParts[1] != "mm" {
		return fmt.Errorf("Invalid format for distance value: %s", message.Distance)
	}
	distance, err2 := strconv.Atoi(distanceParts[0])
	if err2 != nil {
		return fmt.Errorf("Distance value is no number: %s -> %s", message.Distance, err2)
	}
	(*variables)["Battery"] = database.VariableType{
		Label:    "Battery",
		Variable: "Voltage",
		Unit:     "V",
		Value:    message.Bat,
	}
	(*variables)["Distance"] = database.VariableType{
		Label:    "Distance",
		Variable: "Level",
		Unit:     "mm",
		Value:    distance,
	}
	// Sentinel readings: 20 mm is treated as an invalid value, 0 mm as no
	// sensor detected (presumably the LDDS75's documented sentinels —
	// confirm against the datasheet).
	if distance == 20 {
		(*attributes)["Status"] = "invalid value"
	} else if distance == 0 {
		(*attributes)["Status"] = "no sensor detected"
	} else {
		(*attributes)["Status"] = "Ok"
	}
	// GroundLevel is stored as a string attribute on the device; when
	// present and numeric, additionally store the corrected distance
	// (ground level minus measured distance).
	groundLevelI, exists := device.Attributes["GroundLevel"]
	groundLevelS, ok := groundLevelI.(string)
	groundLevel, err3 := strconv.Atoi(groundLevelS)
	if exists && err3 == nil && ok {
		correctedDistance := groundLevel - distance
		(*variables)["CorrectedDistance"] = database.VariableType{
			Label:    "CorrectedDistance",
			Variable: "Level",
			Unit:     "mm",
			Value:    correctedDistance,
		}
	}
	return nil
}

View File

@ -0,0 +1,83 @@
{
"end_device_ids": {
"device_id": "eui-a84041a14185f67f",
"application_ids": {
"application_id": "de-hottis-level-monitoring"
},
"dev_eui": "A84041A14185F67F",
"join_eui": "A840410000000101",
"dev_addr": "260B3987"
},
"correlation_ids": [
"gs:uplink:01HHP8EVC1N78FGTNBTNZYGCVY"
],
"received_at": "2023-12-15T08:11:04.142793605Z",
"uplink_message": {
"session_key_id": "AYZ/cI2YeDLCZr4urRDzCw==",
"f_port": 2,
"f_cnt": 27758,
"frm_payload": "DOMBYAAAAAE=",
"decoded_payload": {
"Bat": 3.299,
"Distance": "352 mm",
"Interrupt_flag": 0,
"Sensor_flag": 1,
"TempC_DS18B20": "0.00"
},
"rx_metadata": [
{
"gateway_ids": {
"gateway_id": "eui-00005813d35e3021",
"eui": "00005813D35E3021"
},
"timestamp": 3654294763,
"rssi": -85,
"channel_rssi": -85,
"snr": 8.8,
"location": {
"latitude": 52.17065267448476,
"longitude": 7.629437184774199,
"source": "SOURCE_REGISTRY"
},
"uplink_token": "CiIKIAoUZXVpLTAwMDA1ODEzZDM1ZTMwMjESCAAAWBPTXjAhEOu5wM4NGgwIl5TwqwYQ5Z/7vgMg+OvDp63eBA==",
"channel_index": 2,
"received_at": "2023-12-15T08:11:03.937349093Z"
}
],
"settings": {
"data_rate": {
"lora": {
"bandwidth": 125000,
"spreading_factor": 7,
"coding_rate": "4/5"
}
},
"frequency": "867500000",
"timestamp": 3654294763
},
"received_at": "2023-12-15T08:11:03.938112475Z",
"consumed_airtime": "0.056576s",
"locations": {
"user": {
"latitude": 52.1710648323742,
"longitude": 7.62751003482794,
"altitude": 37,
"source": "SOURCE_REGISTRY"
}
},
"version_ids": {
"brand_id": "dragino",
"model_id": "ldds75",
"hardware_version": "_unknown_hw_version_",
"firmware_version": "1.1.4",
"band_id": "EU_863_870"
},
"network_ids": {
"net_id": "000013",
"ns_id": "EC656E0000000181",
"tenant_id": "ttn",
"cluster_id": "eu1",
"cluster_address": "eu1.cloud.thethings.network"
}
}
}

View File

@ -0,0 +1,90 @@
package draginoLmds200
import (
"fmt"
"strconv"
"encoding/json"
"udi/database"
)
/*
"decoded_payload": {
"Bat": 3.082,
"DALARM_count": 0,
"Distance_alarm": 0,
"Interrupt_alarm": 0,
"dis1": 105,
"dis2": 201
},
*/
// message mirrors the TTN decoded_payload of a Dragino LMDS200 (see the
// sample in the comment above). dis1/dis2 are the two distance readings;
// the code below scales them by 10, so they presumably arrive in
// centimeters — confirm against the datasheet.
type message struct {
Bat float32 `json:"Bat"`
DALARM_count int `json:"DALARM_count"`
Distance_alarm int `json:"Distance_alarm"`
Interrupt_alarm int `json:"Interrupt_alarm"`
Dis1 int `json:"dis1"`
Dis2 int `json:"dis2"`
}
// Parse decodes a Dragino LMDS200 dual-distance uplink (fPort 2 only). It
// stores battery voltage and both distances in millimeters, derives a Status
// attribute from the sensor's sentinel values, and — when the device carries
// a numeric "GroundLevel" attribute — also stores both corrected distances.
func Parse(fPort int, decodedPayload []byte, _ string, variables *map[string]database.VariableType, attributes *map[string]interface{}, device *database.Device) error {
	if fPort != 2 {
		return fmt.Errorf("Unexpected fPort %d", fPort)
	}
	var message message
	err := json.Unmarshal(decodedPayload, &message)
	if err != nil {
		return fmt.Errorf("Unable to parse payload, fPort %d, error %s", fPort, err)
	}
	(*variables)["Battery"] = database.VariableType{
		Label:    "Battery",
		Variable: "Voltage",
		Unit:     "V",
		Value:    message.Bat,
	}
	// Scale the raw readings by 10 to store millimeters.
	distance1 := message.Dis1 * 10
	(*variables)["Distance1"] = database.VariableType{
		Label:    "Distance1",
		Variable: "Level",
		Unit:     "mm",
		Value:    distance1,
	}
	distance2 := message.Dis2 * 10
	(*variables)["Distance2"] = database.VariableType{
		Label:    "Distance2",
		Variable: "Level",
		Unit:     "mm",
		Value:    distance2,
	}
	// Sentinel values are carried in the raw reading. The original compared
	// the *10-scaled value against 2 and 1, which a multiple of ten can
	// never equal, so both branches were dead and Status was always "Ok".
	// Comparing the raw Dis1 restores them (sentinel meaning presumably
	// mirrors the LDDS75 convention — confirm against the datasheet).
	if message.Dis1 == 2 {
		(*attributes)["Status"] = "invalid value"
	} else if message.Dis1 == 1 {
		(*attributes)["Status"] = "no sensor detected"
	} else {
		(*attributes)["Status"] = "Ok"
	}
	// GroundLevel is stored as a string attribute on the device; when
	// present and numeric, additionally store both corrected distances.
	groundLevelI, exists := device.Attributes["GroundLevel"]
	groundLevelS, ok := groundLevelI.(string)
	groundLevel, err3 := strconv.Atoi(groundLevelS)
	if exists && err3 == nil && ok {
		correctedDistance1 := groundLevel - distance1
		(*variables)["CorrectedDistance1"] = database.VariableType{
			Label:    "CorrectedDistance1",
			Variable: "Level",
			Unit:     "mm",
			Value:    correctedDistance1,
		}
		correctedDistance2 := groundLevel - distance2
		(*variables)["CorrectedDistance2"] = database.VariableType{
			Label:    "CorrectedDistance2",
			Variable: "Level",
			Unit:     "mm",
			Value:    correctedDistance2,
		}
	}
	return nil
}

View File

@ -0,0 +1,84 @@
{
"end_device_ids": {
"device_id": "eui-a840419641867eb5",
"application_ids": {
"application_id": "de-hottis-level-monitoring"
},
"dev_eui": "A840419641867EB5",
"join_eui": "A840410000000101",
"dev_addr": "260B91F9"
},
"correlation_ids": [
"gs:uplink:01HH1R112BNDQQ52N9FVV0TKPW"
],
"received_at": "2023-12-07T08:59:05.369293395Z",
"uplink_message": {
"session_key_id": "AYa9JUhNJp00t+hKqkQUog==",
"f_port": 2,
"f_cnt": 25665,
"frm_payload": "DAoAaQDJAA==",
"decoded_payload": {
"Bat": 3.082,
"DALARM_count": 0,
"Distance_alarm": 0,
"Interrupt_alarm": 0,
"dis1": 105,
"dis2": 201
},
"rx_metadata": [
{
"gateway_ids": {
"gateway_id": "eui-00005813d35e3021",
"eui": "00005813D35E3021"
},
"timestamp": 1141271036,
"rssi": -100,
"channel_rssi": -100,
"snr": 7.3,
"location": {
"latitude": 52.17065267448476,
"longitude": 7.629437184774199,
"source": "SOURCE_REGISTRY"
},
"uplink_token": "CiIKIAoUZXVpLTAwMDA1ODEzZDM1ZTMwMjESCAAAWBPTXjAhEPzTmaAEGgsI2ZLGqwYQnfLnTSDggLjIm5IF",
"channel_index": 6,
"received_at": "2023-12-07T08:59:05.163182877Z"
}
],
"settings": {
"data_rate": {
"lora": {
"bandwidth": 125000,
"spreading_factor": 7,
"coding_rate": "4/5"
}
},
"frequency": "868300000",
"timestamp": 1141271036
},
"received_at": "2023-12-07T08:59:05.163964824Z",
"consumed_airtime": "0.056576s",
"locations": {
"user": {
"latitude": 52.1707216912195,
"longitude": 7.63066603211241,
"altitude": 39,
"source": "SOURCE_REGISTRY"
}
},
"version_ids": {
"brand_id": "dragino",
"model_id": "lmds200",
"hardware_version": "_unknown_hw_version_",
"firmware_version": "1.0",
"band_id": "EU_863_870"
},
"network_ids": {
"net_id": "000013",
"ns_id": "EC656E0000000181",
"tenant_id": "ttn",
"cluster_id": "eu1",
"cluster_address": "eu1.cloud.thethings.network"
}
}
}

View File

@ -0,0 +1,63 @@
package draginoLse01
import (
"fmt"
"encoding/json"
"udi/database"
)
/*
{
"Bat":3.211,
"TempC_DS18B20":"0.0",
"conduct_SOIL":32,
"temp_SOIL":"7.56",
"water_SOIL":"25.92"
}
*/
// message mirrors the decoded_payload of a Dragino LSE01 soil sensor (see
// the sample in the comment above). Temperature and water content arrive as
// JSON strings and are stored verbatim.
type message struct {
Bat float32 `json:"Bat"`
TempC_DS18B20 string `json:"TempC_DS18B20"`
Conduct_SOIL int `json:"conduct_SOIL"`
Temp_SOIL string `json:"temp_SOIL"`
Water_SOIL string `json:"water_SOIL"`
}
// Parse decodes a Dragino LSE01 soil-sensor uplink (fPort 2 only) into
// battery voltage, soil conductance, soil temperature and soil water
// content. Temperature and water values are stored as the strings the
// sensor delivers.
func Parse(fPort int, decodedPayload []byte, _ string, variables *map[string]database.VariableType, _ *map[string]interface{}, _ *database.Device) error {
	if fPort != 2 {
		return fmt.Errorf("Unexpected fPort %d", fPort)
	}
	var msg message
	if err := json.Unmarshal(decodedPayload, &msg); err != nil {
		return fmt.Errorf("Unable to parse payload, fPort %d, error %s", fPort, err)
	}
	vars := *variables
	vars["Battery"] = database.VariableType{
		Label:    "Battery",
		Variable: "Voltage",
		Unit:     "V",
		Value:    msg.Bat,
	}
	vars["Conductance"] = database.VariableType{
		Label:    "Conductance",
		Variable: "Conductance",
		Unit:     "uS/cm",
		Value:    msg.Conduct_SOIL,
	}
	vars["Temperature"] = database.VariableType{
		Label:    "Temperature",
		Variable: "Temperature",
		Unit:     "°C",
		Value:    msg.Temp_SOIL,
	}
	vars["Water"] = database.VariableType{
		Label:    "Water",
		Variable: "Water",
		Unit:     "%",
		Value:    msg.Water_SOIL,
	}
	return nil
}

View File

@ -0,0 +1,74 @@
package draginoLsn50
import (
"fmt"
"encoding/json"
"udi/database"
)
/*
"decoded_payload": {
"ALARM_status": "FALSE",
"BatV": 3.659,
"Temp_Black": 3276.7,
"Temp_Red": 22.6,
"Temp_White": 3276.7,
"Work_mode": "DS18B20"
},
*/
// message mirrors the decoded_payload of a Dragino LSN50 running the DS18B20
// three-probe firmware (see the sample payload in the comment above). The
// three temperatures arrive as JSON numbers; the original declared them as
// string, which made json.Unmarshal reject every message ("cannot unmarshal
// number into Go value of type string").
type message struct {
	ALARM_status string  `json:"ALARM_status"`
	Bat          float32 `json:"BatV"`
	Work_mode    string  `json:"Work_mode"`
	Temp_Black   float32 `json:"Temp_Black"`
	Temp_Red     float32 `json:"Temp_Red"`
	Temp_White   float32 `json:"Temp_White"`
}

// Parse decodes a Dragino LSN50 uplink (fPort 2 only) into battery voltage,
// alarm state and three probe temperatures, and sets the Status attribute
// to "Ok".
func Parse(fPort int, decodedPayload []byte, _ string, variables *map[string]database.VariableType, attributes *map[string]interface{}, device *database.Device) error {
	if fPort != 2 {
		return fmt.Errorf("Unexpected fPort %d", fPort)
	}
	var message message
	err := json.Unmarshal(decodedPayload, &message)
	if err != nil {
		return fmt.Errorf("Unable to parse payload, fPort %d, error %s", fPort, err)
	}
	(*variables)["Battery"] = database.VariableType{
		Label:    "Battery",
		Variable: "Voltage",
		Unit:     "V",
		Value:    message.Bat,
	}
	(*variables)["Alarm"] = database.VariableType{
		Label:    "Alarm",
		Variable: "Alarm",
		Unit:     "",
		Value:    message.ALARM_status,
	}
	(*variables)["Temp_Red"] = database.VariableType{
		Label:    "Temp_Red",
		Variable: "Temperature",
		Unit:     "°C",
		Value:    message.Temp_Red,
	}
	(*variables)["Temp_Black"] = database.VariableType{
		Label:    "Temp_Black",
		Variable: "Temperature",
		Unit:     "°C",
		Value:    message.Temp_Black,
	}
	(*variables)["Temp_White"] = database.VariableType{
		Label:    "Temp_White",
		Variable: "Temperature",
		Unit:     "°C",
		Value:    message.Temp_White,
	}
	(*attributes)["Status"] = "Ok"
	return nil
}

View File

@ -0,0 +1,124 @@
{
"end_device_ids": {
"device_id": "eui-102ceffffe01089c",
"application_ids": {
"application_id": "com-passavant-geiger-poc"
},
"dev_eui": "102CEFFFFE01089C",
"join_eui": "102CEF0000000000",
"dev_addr": "260B0E1A"
},
"correlation_ids": [
"gs:uplink:01HHF7YF14Y7HQBF9D8N8D20ZM"
],
"received_at": "2023-12-12T14:47:26.197129491Z",
"uplink_message": {
"session_key_id": "AYxJcJyrJgr7XiIUdO3EBA==",
"f_port": 1,
"f_cnt": 11738,
"frm_payload": "7HF4ZQsAAAAADAAAAAANAAAAAA4AAAAAFwAYABkA8ADc",
"decoded_payload": {
"Active Power L1": {
"cfgphase": 1,
"unit": "W",
"value": 0
},
"Active Power L123": {
"unit": "W",
"value": 0
},
"Active Power L2": {
"cfgphase": 2,
"unit": "W",
"value": 0
},
"Active Power L3": {
"cfgphase": 3,
"unit": "W",
"value": 0
},
"Powerfactor L1": {
"cfgphase": 1,
"unit": "Cos",
"value": 0,
"value_raw": 0
},
"Powerfactor L2": {
"cfgphase": 2,
"unit": "Cos",
"value": 0,
"value_raw": 0
},
"Powerfactor L3": {
"cfgphase": 3,
"unit": "Cos",
"value": 0,
"value_raw": 0
},
"errorcode": {
"CTRatioChange": false,
"ImpulseRatioChange": false,
"ImpulseWidthChange": false,
"LogbookFull": false,
"PowerFail": false,
"TimeChanged": false,
"VTRatioChange": false,
"value": 0
},
"medium": {
"desc": "Electricity",
"type": 1
},
"timeStamp": 1702392300
},
"rx_metadata": [
{
"gateway_ids": {
"gateway_id": "eui-b827ebfffe8b01dd",
"eui": "B827EBFFFE8B01DD"
},
"time": "2023-12-12T14:47:25.951668977Z",
"timestamp": 3710351237,
"rssi": -89,
"channel_rssi": -89,
"snr": 14,
"location": {
"latitude": 51.404164272478724,
"longitude": 7.060088589208832,
"source": "SOURCE_REGISTRY"
},
"uplink_token": "CiIKIAoUZXVpLWI4MjdlYmZmZmU4YjAxZGQSCLgn6//+iwHdEIXvnekNGgwI/eThqwYQpoGI1wMgiL+rkf5r",
"received_at": "2023-12-12T14:47:25.969479115Z"
}
],
"settings": {
"data_rate": {
"lora": {
"bandwidth": 125000,
"spreading_factor": 7,
"coding_rate": "4/5"
}
},
"frequency": "867100000",
"timestamp": 3710351237,
"time": "2023-12-12T14:47:25.951668977Z"
},
"received_at": "2023-12-12T14:47:25.988957776Z",
"confirmed": true,
"consumed_airtime": "0.092416s",
"version_ids": {
"brand_id": "emu",
"model_id": "emu-prof-ii",
"hardware_version": "1.0",
"firmware_version": "1.0",
"band_id": "EU_863_870"
},
"network_ids": {
"net_id": "000013",
"ns_id": "EC656E0000000181",
"tenant_id": "ttn",
"cluster_id": "eu1",
"cluster_address": "eu1.cloud.thethings.network"
}
}
}

View File

@ -0,0 +1,93 @@
{
"end_device_ids": {
"device_id": "eui-102ceffffe01089c",
"application_ids": {
"application_id": "com-passavant-geiger-poc"
},
"dev_eui": "102CEFFFFE01089C",
"join_eui": "102CEF0000000000",
"dev_addr": "260B0E1A"
},
"correlation_ids": [
"gs:uplink:01HH53T0RPG7QFT6N267J3Q2PA"
],
"received_at": "2023-12-08T16:22:41.900339885Z",
"uplink_message": {
"session_key_id": "AYxJcJyrJgr7XiIUdO3EBA==",
"f_port": 2,
"f_cnt": 374,
"frm_payload": "BEFzZSQAAAAAAAAAACYAAAAAAAAAACgAAAAAAAAAACoAAAAAAAAAAMk=",
"decoded_payload": {
"Active Energy Export T1 64bit": {
"unit": "Wh",
"value": 0
},
"Active Energy Import T1 64bit": {
"unit": "Wh",
"value": 0
},
"Reactive Energy Export T1 64bit": {
"unit": "varh",
"value": 0
},
"Reactive Energy Import T1 64bit": {
"unit": "varh",
"value": 0
},
"medium": {
"desc": "Electricity",
"type": 1
},
"timeStamp": 1702052100
},
"rx_metadata": [
{
"gateway_ids": {
"gateway_id": "eui-b827ebfffe8b01dd",
"eui": "B827EBFFFE8B01DD"
},
"time": "2023-12-08T16:22:40.969499111Z",
"timestamp": 383972090,
"rssi": -93,
"channel_rssi": -93,
"snr": 12.75,
"location": {
"latitude": 51.404164272478724,
"longitude": 7.060088589208832,
"source": "SOURCE_REGISTRY"
},
"uplink_token": "CiIKIAoUZXVpLWI4MjdlYmZmZmU4YjAxZGQSCLgn6//+iwHdEPrli7cBGgwI0YXNqwYQoteqxwIgkMGUtJb+pwI=",
"received_at": "2023-12-08T16:22:41.668146811Z"
}
],
"settings": {
"data_rate": {
"lora": {
"bandwidth": 125000,
"spreading_factor": 7,
"coding_rate": "4/5"
}
},
"frequency": "867700000",
"timestamp": 383972090,
"time": "2023-12-08T16:22:40.969499111Z"
},
"received_at": "2023-12-08T16:22:41.687510109Z",
"confirmed": true,
"consumed_airtime": "0.102656s",
"version_ids": {
"brand_id": "emu",
"model_id": "emu-prof-ii",
"hardware_version": "1.0",
"firmware_version": "1.0",
"band_id": "EU_863_870"
},
"network_ids": {
"net_id": "000013",
"ns_id": "EC656E0000000181",
"tenant_id": "ttn",
"cluster_id": "eu1",
"cluster_address": "eu1.cloud.thethings.network"
}
}
}

View File

@ -0,0 +1,257 @@
package emuProfIILoRaCfg1
// provisioning device with
// f_port=1: 01 00 0A 0B 0C 0D 0E 17 18 19 F0 4C
// 01 00 08 0B 0C 0D 0E 17 18 19 F0 BE
// f_port=2: 01 00 0A 24 26 28 2A 7A
// 01 00 08 24 26 28 2A BE
import (
"fmt"
//"log"
"encoding/json"
"udi/database"
)
/*
{
"Active Energy Export T1 64bit": {
"unit": "Wh",
"value": 0
},
"Active Energy Import T1 64bit": {
"unit": "Wh",
"value": 0
},
"Reactive Energy Export T1 64bit": {
"unit": "varh",
"value": 0
},
"Reactive Energy Import T1 64bit": {
"unit": "varh",
"value": 0
},
"medium": {
"desc": "Electricity",
"type": 1
},
"timeStamp": 1702052100
}
*/
// emuMessage2 mirrors the fPort 2 decoded payload of the EMU Professional II
// meter: the four energy registers plus medium and timestamp (see the sample
// payload in the comment above).
type emuMessage2 struct {
	ActiveEnergyExport struct {
		Value int    `json:"value"`
		Unit  string `json:"unit"`
	} `json:"Active Energy Export T1 64bit"`
	ReactiveEnergyExport struct {
		Value int    `json:"value"`
		Unit  string `json:"unit"`
	} `json:"Reactive Energy Export T1 64bit"`
	ActiveEnergyImport struct {
		Value int    `json:"value"`
		Unit  string `json:"unit"`
	} `json:"Active Energy Import T1 64bit"`
	ReactiveEnergyImport struct {
		Value int    `json:"value"`
		Unit  string `json:"unit"`
	} `json:"Reactive Energy Import T1 64bit"`
	Medium struct {
		Desc string `json:"desc"`
		Type int    `json:"type"`
	} `json:"medium"`
	// The payload key is spelled "timeStamp" (capital S); the original tag
	// "timestamp" never matched, leaving Timestamp always zero.
	Timestamp int `json:"timeStamp"`
}
/*
{
"Active Power L1": {
"cfgphase": 1,
"unit": "W",
"value": 0
},
"Active Power L123": {
"unit": "W",
"value": 0
},
"Active Power L2": {
"cfgphase": 2,
"unit": "W",
"value": 0
},
"Active Power L3": {
"cfgphase": 3,
"unit": "W",
"value": 0
},
"Powerfactor L1": {
"cfgphase": 1,
"unit": "Cos",
"value": 0,
"value_raw": 0
},
"Powerfactor L2": {
"cfgphase": 2,
"unit": "Cos",
"value": 0,
"value_raw": 0
},
"Powerfactor L3": {
"cfgphase": 3,
"unit": "Cos",
"value": 0,
"value_raw": 0
},
"errorcode": {
"CTRatioChange": false,
"ImpulseRatioChange": false,
"ImpulseWidthChange": false,
"LogbookFull": false,
"PowerFail": false,
"TimeChanged": false,
"VTRatioChange": false,
"value": 0
},
"medium": {
"desc": "Electricity",
"type": 1
},
"timeStamp": 1702392300
}
*/
// emuMessage1 mirrors the fPort 1 decoded payload of the EMU Professional II
// meter: per-phase and total active power, per-phase power factors, error
// flags, medium and timestamp (see the sample payload in the comment above).
type emuMessage1 struct {
	ActivePowerL1 struct {
		CfgPhase int    `json:"cfgphase"`
		Unit     string `json:"unit"`
		Value    int    `json:"value"`
	} `json:"Active Power L1"`
	ActivePowerL2 struct {
		CfgPhase int    `json:"cfgphase"`
		Unit     string `json:"unit"`
		Value    int    `json:"value"`
	} `json:"Active Power L2"`
	ActivePowerL3 struct {
		CfgPhase int    `json:"cfgphase"`
		Unit     string `json:"unit"`
		Value    int    `json:"value"`
	} `json:"Active Power L3"`
	ActivePowerL123 struct {
		Unit  string `json:"unit"`
		Value int    `json:"value"`
	} `json:"Active Power L123"`
	PowerfactorL1 struct {
		CfgPhase int     `json:"cfgphase"`
		Unit     string  `json:"unit"`
		Value    float32 `json:"value"`
		ValueRaw float32 `json:"value_raw"`
	} `json:"Powerfactor L1"`
	PowerfactorL2 struct {
		CfgPhase int     `json:"cfgphase"`
		Unit     string  `json:"unit"`
		Value    float32 `json:"value"`
		ValueRaw float32 `json:"value_raw"`
	} `json:"Powerfactor L2"`
	PowerfactorL3 struct {
		CfgPhase int     `json:"cfgphase"`
		Unit     string  `json:"unit"`
		Value    float32 `json:"value"`
		ValueRaw float32 `json:"value_raw"`
	} `json:"Powerfactor L3"`
	ErrorCode struct {
		CTRatioChange      bool `json:"CTRatioChange"`
		ImpulseRatioChange bool `json:"ImpulseRatioChange"`
		ImpulseWidthChange bool `json:"ImpulseWidthChange"`
		LogbookFull        bool `json:"LogbookFull"`
		PowerFail          bool `json:"PowerFail"`
		TimeChanged        bool `json:"TimeChanged"`
		VTRatioChange      bool `json:"VTRatioChange"`
		Value              int  `json:"value"`
	} `json:"errorcode"`
	Medium struct {
		Desc string `json:"desc"`
		Type int    `json:"type"`
	} `json:"medium"`
	// The payload key is spelled "timeStamp" (capital S); the original tag
	// "timestamp" never matched, leaving Timestamp always zero.
	Timestamp int `json:"timeStamp"`
}
// Parse decodes an EMU Professional II LoRa uplink. fPort 1 carries power
// and power-factor readings, fPort 2 carries the energy registers; any other
// port is rejected. Decoded values go into the caller-supplied variables map.
func Parse(fPort int, decodedPayload []byte, _ string, variables *map[string]database.VariableType, _ *map[string]interface{}, _ *database.Device) error {
	// store writes one reading into the variables map, using the same name
	// for the map key and the Variable field (Label stays empty, as before).
	store := func(name string, unit string, value interface{}) {
		(*variables)[name] = database.VariableType{
			Variable: name,
			Unit:     unit,
			Value:    value,
		}
	}
	switch fPort {
	case 1:
		var msg emuMessage1
		if err := json.Unmarshal(decodedPayload, &msg); err != nil {
			return fmt.Errorf("Unable to parse payload, fPort %d, error %s", fPort, err)
		}
		store("ActivePowerL1", msg.ActivePowerL1.Unit, msg.ActivePowerL1.Value)
		store("ActivePowerL2", msg.ActivePowerL2.Unit, msg.ActivePowerL2.Value)
		store("ActivePowerL3", msg.ActivePowerL3.Unit, msg.ActivePowerL3.Value)
		store("ActivePowerL123", msg.ActivePowerL123.Unit, msg.ActivePowerL123.Value)
		store("PowerfactorL1", msg.PowerfactorL1.Unit, msg.PowerfactorL1.Value)
		store("PowerfactorL2", msg.PowerfactorL2.Unit, msg.PowerfactorL2.Value)
		store("PowerfactorL3", msg.PowerfactorL3.Unit, msg.PowerfactorL3.Value)
		return nil
	case 2:
		var msg emuMessage2
		if err := json.Unmarshal(decodedPayload, &msg); err != nil {
			return fmt.Errorf("Unable to parse payload, fPort %d, error %s", fPort, err)
		}
		store("ActiveEnergyExport", msg.ActiveEnergyExport.Unit, msg.ActiveEnergyExport.Value)
		store("ActiveEnergyImport", msg.ActiveEnergyImport.Unit, msg.ActiveEnergyImport.Value)
		store("ReactiveEnergyExport", msg.ReactiveEnergyExport.Unit, msg.ReactiveEnergyExport.Value)
		store("ReactiveEnergyImport", msg.ReactiveEnergyImport.Unit, msg.ReactiveEnergyImport.Value)
		return nil
	default:
		return fmt.Errorf("Unexpected fPort %d", fPort)
	}
}

View File

@ -0,0 +1,61 @@
package hottisScd30
import (
//"log"
"fmt"
"bytes"
"encoding/base64"
"encoding/binary"
"udi/database"
)
// hottisScd30Values mirrors the packed binary uplink frame of the
// Hottis SCD30 sensor. Field order and widths must match the on-wire
// encoding exactly, because the raw bytes are decoded into this struct
// with binary.Read (little-endian) in Parse.
type hottisScd30Values struct {
	Status  uint8 // raw sensor status byte, forwarded unmodified as an attribute
	CO2Conc int32 // CO2 concentration, transmitted as ppm * 100
	Temp    int32 // temperature, transmitted as °C * 100
	Hum     int32 // relative humidity, transmitted as % * 100
}
// Parse decodes an uplink frame of the Hottis SCD30 CO2 sensor.
// It expects on fPort 2 a base64-encoded, little-endian packed struct
// (see hottisScd30Values): a status byte followed by CO2 concentration,
// temperature and humidity, each transmitted as value * 100.
// The scaled readings are written to variables, the raw status byte to
// attributes. An error is returned for any other fPort or a payload
// that cannot be base64-decoded or is too short for the struct.
func Parse(fPort int, _ []byte, frmPayload string, variables *map[string]database.VariableType, attributes *map[string]interface{}, _ *database.Device) error {
	if fPort != 2 {
		return fmt.Errorf("Unexpected fPort %d", fPort)
	}
	b, err := base64.StdEncoding.DecodeString(frmPayload)
	if err != nil {
		return fmt.Errorf("Unable to base64-decode payload: %v", err)
	}
	var values hottisScd30Values
	err = binary.Read(bytes.NewReader(b), binary.LittleEndian, &values)
	if err != nil {
		return fmt.Errorf("Unable to cast into struct: %v", err)
	}
	// All readings are sent as integers scaled by 100.
	co2concentration := float32(values.CO2Conc) / 100
	temperature := float32(values.Temp) / 100
	humidity := float32(values.Hum) / 100
	// log.Printf("CO2: %f, Temp: %f, Hum: %f, Status: %d", co2concentration, temperature, humidity, values.Status)
	(*variables)["CO2concentration"] = database.VariableType{
		Label:    "CO2concentration",
		Variable: "Concentration",
		Unit:     "ppm",
		Value:    co2concentration,
	}
	(*variables)["Temperature"] = database.VariableType{
		Label:    "Temperature",
		Variable: "Temperature",
		Unit:     "°C",
		Value:    temperature,
	}
	(*variables)["Humidity"] = database.VariableType{
		Label:    "Humidity",
		Variable: "Humidity",
		Unit:     "%",
		Value:    humidity,
	}
	(*attributes)["Status"] = values.Status
	return nil
}

View File

@ -0,0 +1,25 @@
package rawPayloadPrinter
import (
"log"
"fmt"
"encoding/base64"
"encoding/hex"
"udi/database"
)
// Parse is a diagnostic model parser: it extracts no variables and sets
// no attributes, it merely base64-decodes frmPayload and logs it as a
// hex string. Only fPort 2 is accepted; any other port is an error.
func Parse(fPort int, _ []byte, frmPayload string, variables *map[string]database.VariableType, _ *map[string]interface{}, _ *database.Device) error {
	if fPort != 2 {
		return fmt.Errorf("Unexpected fPort %d", fPort)
	}
	payload, err := base64.StdEncoding.DecodeString(frmPayload)
	if err != nil {
		return fmt.Errorf("Unable to base64-decode payload: %v", err)
	}
	log.Printf("Payload: %s", hex.EncodeToString(payload))
	return nil
}

View File

@ -1,24 +1,176 @@
package ttn
import "log"
import "udi/handlers/handler"
import (
"fmt"
//"log"
"time"
"encoding/json"
"udi/config"
"udi/handlers/handler"
"udi/handlers/ttn/models/emuProfIILoRaCfg1"
"udi/handlers/ttn/models/draginoLdds75"
"udi/handlers/ttn/models/draginoLmds200"
"udi/handlers/ttn/models/draginoLse01"
"udi/handlers/ttn/models/draginoLsn50"
"udi/handlers/ttn/models/rawPayloadPrinter"
"udi/handlers/ttn/models/hottisScd30"
"udi/database"
)
// idSeq is the source for the numeric per-instance handler id handed
// out in New. NOTE(review): it is incremented without synchronization —
// safe only while handlers are constructed from a single goroutine;
// confirm against the caller.
var idSeq int = 0
// TTNHandler dispatches TTN uplink messages to device-model-specific
// payload parsers and stores the resulting measurements through dbh.
type TTNHandler struct {
	id int // numeric instance id assigned from idSeq in New
	handler.CommonHandler
	dbh *database.DatabaseHandle
}
func NewTTNHandler() *TTNHandler {
// DecodedPayloaderHolder defers interpretation of TTN's decoded_payload
// object: its custom UnmarshalJSON stores the raw JSON bytes verbatim
// so the device-model-specific parser can decode them later.
type DecodedPayloaderHolder struct {
	Payload []byte // raw, uninterpreted JSON of decoded_payload
}
// uplinkMessage mirrors the subset of the TTN uplink JSON envelope that
// this handler consumes; any other fields of the envelope are silently
// ignored by json.Unmarshal.
type uplinkMessage struct {
	// Identifiers of the sending end device and its TTN application.
	EndDeviceIds struct {
		DeviceId       string `json:"device_id"`
		ApplicationIds struct {
			ApplicationId string `json:"application_id"`
		} `json:"application_ids"`
		DevEui  string `json:"dev_eui"`
		JoinEui string `json:"join_eui"`
		DevAddr string `json:"dev_addr"`
	} `json:"end_device_ids"`
	ReceivedAt    string `json:"received_at"`
	UplinkMessage struct {
		FCnt       int    `json:"f_cnt"`       // LoRaWAN frame counter
		FPort      int    `json:"f_port"`      // LoRaWAN port, selects the payload format
		FrmPayload string `json:"frm_payload"` // raw payload, base64-encoded
		// DecodedPayload keeps the raw JSON bytes via the custom
		// unmarshaler on DecodedPayloaderHolder.
		DecodedPayload DecodedPayloaderHolder `json:"decoded_payload"`
		// RxMetadata carries per-gateway reception quality data.
		RxMetadata []struct {
			GatewayIds struct {
				GatewayId string `json:"gateway_id"`
				Eui       string `json:"eui"`
			} `json:"gateway_ids"`
			Time         string  `json:"time"`
			Rssi         int     `json:"rssi"`
			ChannelRssi  int     `json:"channel_rssi"`
			Snr          float32 `json:"snr"`
			ChannelIndex int     `json:"channel_index"`
		} `json:"rx_metadata"`
		ConsumedAirtime string `json:"consumed_airtime"`
	} `json:"uplink_message"`
}
// gatewayAttributes is the per-gateway reception summary (one entry per
// receiving gateway) stored in the measurement attributes.
type gatewayAttributes struct {
	GatewayId string  `json:"gateway_id"`
	Rssi      int     `json:"rssi"`
	Snr       float32 `json:"snr"`
}
// attributes aggregates the uplink metadata that is flattened into the
// attribute map stored alongside the measured values.
type attributes struct {
	DeviceId        string              `json:"device_id"`
	ApplicationId   string              `json:"application_id"`
	FCnt            int                 `json:"f_cnt"`
	FPort           int                 `json:"f_port"`
	FrmPayload      string              `json:"frm_payload"`
	Gateways        []gatewayAttributes `json:"gateways"`
	ConsumedAirtime string              `json:"consumed_airtime"`
}
// UnmarshalJSON implements json.Unmarshaler by capturing the raw JSON
// bytes of decoded_payload verbatim instead of parsing them.
// The json.Unmarshaler contract states the input slice may be reused by
// the caller after return, so a copy must be retained rather than an
// alias of data.
func (self *DecodedPayloaderHolder) UnmarshalJSON(data []byte) error {
	self.Payload = append([]byte(nil), data...)
	return nil
}
// New constructs a TTNHandler: it assigns the next numeric instance id
// from idSeq, stores the textual handler id on the embedded
// CommonHandler and opens a fresh database handle.
// The config parameter is accepted but not used here.
func New(id string, config config.HandlerConfigT) handler.Handler {
	t := &TTNHandler {
		id: idSeq,
	}
	idSeq += 1
	t.Id = id
	t.dbh = database.NewDatabaseHandle()
	return t
}
// Handle processes a single TTN uplink MQTT message end to end:
// unmarshal the JSON envelope, collect message and per-gateway metadata
// as attributes, resolve the device record by application and device
// id, dispatch to the payload parser selected by the device's model
// identifier, and persist the resulting measurement. Any failure is
// reported via self.Lost and the message is dropped.
func (self *TTNHandler) Handle(message handler.MessageT) {
	log.Printf("Handler TTN %d processing %s -> %s", self.id, message.Topic, message.Payload)
	//log.Printf("Handler TTN %d processing %s -> %s", self.Id, message.Topic, message.Payload)
	var measurement database.Measurement
	// Timestamp with the local receive time, not the device/network time.
	measurement.Time = time.Now()
	var uplinkMessage uplinkMessage
	err := json.Unmarshal([]byte(message.Payload), &uplinkMessage)
	if err != nil {
		self.Lost("Error when unmarshaling message", err, message)
		return
	}
	//log.Printf("Parsed message: %s", uplinkMessage)
	// Flatten the envelope into the attribute set stored with the
	// measurement.
	var attributes attributes
	attributes.DeviceId = uplinkMessage.EndDeviceIds.DeviceId
	attributes.ApplicationId = uplinkMessage.EndDeviceIds.ApplicationIds.ApplicationId
	attributes.FCnt = uplinkMessage.UplinkMessage.FCnt
	attributes.FPort = uplinkMessage.UplinkMessage.FPort
	attributes.FrmPayload = uplinkMessage.UplinkMessage.FrmPayload
	attributes.ConsumedAirtime = uplinkMessage.UplinkMessage.ConsumedAirtime
	// One summary entry (gateway id, RSSI, SNR) per receiving gateway.
	for _, rxm := range uplinkMessage.UplinkMessage.RxMetadata {
		//log.Printf("RXM: %s", rxm)
		g := gatewayAttributes { GatewayId: rxm.GatewayIds.GatewayId, Rssi: rxm.Rssi, Snr: rxm.Snr }
		attributes.Gateways = append(attributes.Gateways, g)
	}
	//log.Printf("Attributes: %s", attributes)
	measurement.Attributes = map[string]interface{} {
		"DeviceId": attributes.DeviceId,
		"ApplicationId": attributes.ApplicationId,
		"FCnt": attributes.FCnt,
		"FPort": attributes.FPort,
		"FrmPayload": attributes.FrmPayload,
		"Gateways": attributes.Gateways,
		"ConsumedAirtime": attributes.ConsumedAirtime,
	}
	//log.Printf("ApplicationId: %s, DeviceId: %s", attributes.ApplicationId, attributes.DeviceId)
	// The device record supplies the model identifier that selects the
	// payload parser below.
	device, err2 := self.dbh.GetDeviceByLabelAndApplication(attributes.ApplicationId, attributes.DeviceId)
	if err2 != nil {
		self.Lost("Error when loading device", err2, message)
		return
	}
	measurement.Application = attributes.ApplicationId
	measurement.Device = attributes.DeviceId
	measurement.Attributes["DeviceType"] = device.DeviceType.ModelIdentifier
	//log.Printf("DeviceLabel: %s, DeviceType: %s", device.Label, device.DeviceType.ModelIdentifier)
	// Dispatch on the device model; every model package exposes a Parse
	// with this shared signature (fPort, decoded payload, raw payload,
	// out-variables, out-attributes, device).
	var parser func(int, []byte, string, *map[string]database.VariableType, *map[string]interface{}, *database.Device) error
	switch device.DeviceType.ModelIdentifier {
	case "emu-prof-ii-lora-cfg1":
		parser = emuProfIILoRaCfg1.Parse
	case "dragino-ldds75":
		parser = draginoLdds75.Parse
	case "dragino-lmds200":
		parser = draginoLmds200.Parse
	case "dragino-lse01":
		parser = draginoLse01.Parse
	case "dragino-lsn50":
		parser = draginoLsn50.Parse
	case "raw-payload-printer":
		parser = rawPayloadPrinter.Parse
	case "hottis-scd30":
		parser = hottisScd30.Parse
	default:
		self.Lost(fmt.Sprintf("No parser found for %s", device.DeviceType.ModelIdentifier), nil, message)
		return
	}
	measurement.Values = make(map[string]database.VariableType)
	err3 := parser(uplinkMessage.UplinkMessage.FPort,
		uplinkMessage.UplinkMessage.DecodedPayload.Payload,
		uplinkMessage.UplinkMessage.FrmPayload,
		&(measurement.Values),
		&(measurement.Attributes),
		device)
	if err3 != nil {
		self.Lost("Model parser failed", err3, message)
		return
	}
	//log.Printf("Prepared measurement item: %s", measurement)
	self.dbh.StoreMeasurement(&measurement)
	// Count the message as successfully handled.
	self.S()
}

View File

@ -5,6 +5,7 @@ import "os"
import "os/signal"
import "udi/mqtt"
import "udi/config"
import "udi/counter"
import "udi/dispatcher"
@ -23,6 +24,8 @@ func main() {
mqtt.StartMqttClient()
defer mqtt.StopMqttClient()
counter.InitCounter()
log.Println("UDI running")
c := make(chan os.Signal, 1)

View File

@ -7,6 +7,7 @@ import MQTT "github.com/eclipse/paho.mqtt.golang"
import "github.com/google/uuid"
import "crypto/tls"
import "udi/config"
import "udi/counter"
type Message struct {
Topic string
@ -26,10 +27,12 @@ func onMessageReceived(client MQTT.Client, message MQTT.Message) {
}
select {
case InputChannel <- m:
counter.S("Received")
{}
//log.Println("Message sent to channel")
default:
log.Println("Channel full, message lost")
counter.F("Received")
}
}
@ -54,7 +57,7 @@ func onConnect(client MQTT.Client) {
if token := client.Subscribe(topic, 0, onMessageReceived); token.Wait() && token.Error() != nil {
log.Fatalf("Unable to subscribe to topic %s, error %s", topic, token.Error())
}
log.Printf("Successfully subscribed to topic %s", topic)
log.Printf("Topic %s subscribed", topic)
}
}
@ -101,19 +104,19 @@ func StartMqttClient() {
enableTls := config.Config.Mqtt.TlsEnable
if enableTls == "true" {
log.Println("Enabling TLS connection")
//log.Println("Enabling TLS connection")
tlsConfig := &tls.Config {
InsecureSkipVerify: true,
}
opts.SetTLSConfig(tlsConfig)
}
log.Println("Trying to connect to broker")
log.Println("Broker connecting")
mqttClient = MQTT.NewClient(opts)
if token := mqttClient.Connect(); token.Wait() && token.Error() != nil {
log.Fatalf("Unable to connect to broker %s, error %s", broker, token.Error())
}
log.Printf("Successfully connected to broker %s", broker)
//log.Printf("Successfully connected to broker %s", broker)
go outputDispatcher(mqttClient)