Compare commits

...

7 Commits

SHA1        Message                       Date                        CI (ci/woodpecker)
e209598f9e  secrets                       2023-12-24 15:57:53 +01:00  push ✓  tag ✓
03f8f9fade  fix in deploy script          2023-12-24 14:41:49 +01:00  push ✓  tag ✓
fc91a0da2e  change db password approach   2023-12-24 14:28:46 +01:00  push ✓  tag ✓
7d8d8b1c6a  soil hottis                   2023-12-24 11:05:37 +01:00  push ✓  tag ✓
ffbda52c36  migration stuff               2023-12-23 22:57:27 +01:00  push ✓
647a2d36e5  saerbeck                      2023-12-23 21:57:43 +01:00  push ✓  tag ✓
a8db62ea52  LSE01                         2023-12-22 17:28:32 +01:00  push ✓

(✓ = "Pipeline was successful"; all checks were successful for every commit.)
13 changed files with 157 additions and 95 deletions

View File

@@ -35,9 +35,12 @@ for NAMESPACE_DIR in `find $INSTANCES_DIR -type d -mindepth 1 -maxdepth 1`; do
    INSTANCE=`basename $INSTANCE_DIR`
    echo "Instance: $INSTANCE"
+   # set secret configuration from encrypted and decrypted file
+   VARIABLE_PREFIX=`echo "$NAMESPACE""_""$INSTANCE" | tr - _`
    # set MQTT_PASSWORD as secret
-   MQTT_PASSWORD_VARIABLE="$NAMESPACE""_""$INSTANCE""_MQTT_PASSWORD"
-   MQTT_PASSWORD_VARIABLE=`echo $MQTT_PASSWORD_VARIABLE | tr - _`
+   MQTT_PASSWORD_VARIABLE=$VARIABLE_PREFIX"_MQTT_PASSWORD"
    MQTT_PASSWORD="${!MQTT_PASSWORD_VARIABLE}"
    # echo "MQTT_PASSWORD_VARIABLE: $MQTT_PASSWORD_VARIABLE"
    # echo "MQTT_PASSWORD: $MQTT_PASSWORD"
@@ -48,25 +51,15 @@ for NAMESPACE_DIR in `find $INSTANCES_DIR -type d -mindepth 1 -maxdepth 1`; do
      --save-config | \
      kubectl apply -f - -n $NAMESPACE
-   # set database configuration as secret
-   ## prepare configuration to access database to set udi database password
-   PGUSER=`kubectl get secret -n database timescaledb -o jsonpath="{.data.superuser-username}" | base64 -d`
-   PGHOST=`kubectl get services traefik -n system -o jsonpath="{.status.loadBalancer.ingress[0].ip}"`
-   PGPASSWORD=`kubectl get secret -n database timescaledb -o jsonpath="{.data.superuser-password}" | base64 -d`
-   PGSSLMODE=require
-   NEW_UDI_DB_LOGIN="udi""-""$NAMESPACE""-""$INSTANCE"
-   NEW_UDI_DB_PASSWORD=`tr -dc 'a-zA-Z0-9' < /dev/urandom | head -c 32`
-   NEW_UDI_DB_DATABASE="udi""-""$NAMESPACE""-""$INSTANCE"
+   LOGIN_VARIABLE=$VARIABLE_PREFIX"_PGUSER"
+   NEW_UDI_DB_LOGIN="${!LOGIN_VARIABLE}"
+   PASSWORD_VARIABLE=$VARIABLE_PREFIX"_PGPASSWORD"
+   NEW_UDI_DB_PASSWORD="${!PASSWORD_VARIABLE}"
+   DATABASE_VARIABLE=$VARIABLE_PREFIX"_PGDATABASE"
+   NEW_UDI_DB_DATABASE="${!DATABASE_VARIABLE}"
    NEW_UDI_DB_HOST=timescaledb.database.svc.cluster.local
-   DATABASE_MASTER_POD=`kubectl get pods -n database -l app=StackGresCluster -l role=master -o jsonpath='{.items[0].metadata.name}'`
-   kubectl exec -i $DATABASE_MASTER_POD -c postgres-util -n database -- psql <<EOF
-BEGIN;
-ALTER USER "$NEW_UDI_DB_LOGIN" WITH PASSWORD '$NEW_UDI_DB_PASSWORD';
-COMMIT;
-EOF
    kubectl create secret generic $INSTANCE-udi-db-cred \
      --dry-run=client \
      -o yaml \
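
The change above stops generating per-instance database passwords in the deploy script; instead, credentials are read from environment variables named after namespace and instance. A minimal sketch of that indirection pattern, with illustrative names (the real variables come from the decrypted secrets file, they are never set inline like this):

#!/bin/bash
# Sketch only: demonstrates the ${!VAR} indirect expansion used in the deploy script.
NAMESPACE=de-hottis                              # illustrative namespace
INSTANCE=app01                                   # illustrative instance
export de_hottis_app01_PGUSER="udi-de-hottis-app01"       # placeholders; normally sourced
export de_hottis_app01_PGPASSWORD="example-password"      # from the decrypted secrets file
export de_hottis_app01_PGDATABASE="udi-de-hottis-app01"

# hyphens are not allowed in shell variable names, hence tr - _
VARIABLE_PREFIX=`echo "$NAMESPACE""_""$INSTANCE" | tr - _`   # -> de_hottis_app01

LOGIN_VARIABLE=$VARIABLE_PREFIX"_PGUSER"
NEW_UDI_DB_LOGIN="${!LOGIN_VARIABLE}"            # indirect expansion: value of de_hottis_app01_PGUSER
PASSWORD_VARIABLE=$VARIABLE_PREFIX"_PGPASSWORD"
NEW_UDI_DB_PASSWORD="${!PASSWORD_VARIABLE}"

echo "login=$NEW_UDI_DB_LOGIN password=$NEW_UDI_DB_PASSWORD"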

View File

@@ -11,13 +11,12 @@
      "handler": "TTN",
      "id": "TTN0",
      "config": {
-       "databaseConnStr": "",
        "attributes": {
        }
      }
    }
  ],
  "archiver": {
-   "dir": "./tmp/udi"
+   "dir": "/archive"
  }
}

View File

@@ -1,17 +1,16 @@
{
  "mqtt": {
    "broker": "ssl://eu1.cloud.thethings.network:8883",
-   "username": "com-passavant-geiger-poc@ttn",
+   "username": "de-hottis-lora-test1@ttn",
    "password": "ENV",
    "tlsEnable": "true"
  },
  "topicMappings": [
    {
-     "topics": [ "v3/com-passavant-geiger-poc@ttn/devices/#" ],
+     "topics": [ "v3/#" ],
      "handler": "TTN",
      "id": "TTN0",
      "config": {
-       "databaseConnStr": "",
        "attributes": {
        }
      }

View File

@@ -1,23 +1,22 @@
{
  "mqtt": {
    "broker": "ssl://eu1.cloud.thethings.network:8883",
-   "username": "com-passavant-geiger-poc@ttn",
+   "username": "de-hottis-app01@ttn",
    "password": "ENV",
    "tlsEnable": "true"
  },
  "topicMappings": [
    {
-     "topics": [ "v3/com-passavant-geiger-poc@ttn/devices/#" ],
+     "topics": [ "v3/#" ],
      "handler": "TTN",
      "id": "TTN0",
      "config": {
-       "databaseConnStr": "",
        "attributes": {
        }
      }
    }
  ],
  "archiver": {
-   "dir": "./tmp/udi"
+   "dir": "/archive"
  }
}

View File

@@ -1,7 +1,32 @@
- U2FsdGVkX1+jsPd67Sl8bXkH/OVKx3clFUnpJBzwRXA9qEhyDrwMJWg3UH6eiCZd
- PIwApBbDKxR0W/NotaE9100gQo+L3tHL0elMo/YOFkiwkM3tJ1v3zngY1BUoEAGJ
- 4aZNVC7Eb9HDG0RKqh7fSPuazeKxd1fih9nQuxy//XgRivqjbgqfD/xLi5NZ/n0F
- nJT1MsBKGmYF4ezC7thSYIN/+GDL6gpty/FYxAi56oFy6q2YGuADXPgRs1v22vz3
- KP+9mIeg6VZn6IRv00w0HSJ3UghMMx/b3pDjIVrq9I3x9sXDD1YqYbjjcnopqKsl
- +gVrX/jCsDBmArRXFu1innOJ+pOsHJ3sSR5fRugnn0Fgje63n4mN46eJYdwhHgEr
- yIuyMipc0Uc1ZOsEpkrmYnEofgGaWC3IrdsVEwybbGQ=
+ U2FsdGVkX1+DXC4uFXaRWr40xvTwUDMfmx3gZmixNJWP4djN5e5JZYmq2uWB/kQr
+ 0eCD4UM9cRnwyqCJudsOJnB8pT6XQgl/ZkSZavSOxG7r0uh90IqOe25nxWH3iiza
+ oPWW0qR9KXB4qNQEAHkoww+dz7B2zFaDSQPgzm2oV9SWXfjhu0nDPcBO2e3gzSvU
+ vCuwLnmG/4oacBgAeJHyys2NmW1e2ZnjbFOT+hMBtGPwwEIQ/mbq7IWrfiREUJ7B
+ U1LoN4NPnkTtbFf63AkuQ6Lq0mkH6a6ZoVfkyg7kS9VIhznoDcZb29S/N6cGgoRG
+ KFu1VbyjoCXnskZ/a9rw4e3Epau9qoeupmALlmcogOK3J0g1EhltmFMsfMnTDIkf
+ Pj3t4+WZn0xozX89gLwNYYATALQfL+mAFRHpwx924Mh12tRzgSHyq+BHbcI5yjKA
+ eNyP+gUT6nmrSGhlwBXsUzILsOCxxdhNsSd0h3S3huhuS+RivnXpGJQnR5vXmc8d
+ iQE28Rx9YyrOM8+lpIL5mz7O7cSjEzwuIWLsnvs2nowYl8erWu7e+R9e2e8ulAgt
+ UokWp2tOMFfvPU6bHJwDcre42Ozv8QefPQb9E8hcZe6U3ibpw715sV4YvSchxLCF
+ cGzwgRSceKd+LOuoXOTfcUAvMzkOcV5/VRbX/ZbESt5ntw+g2AZqp3T+7iAnPmtx
+ q2RN94Yes/8yKkvAV+wO6qBv7mCn79ZPYdf8C+eWVQv/uJ9BRmJMxy0FfUFz+kY0
+ +QL72T1BJwBRWkWmAmOuQSvQ6q+MtzekwuXfQgmcdtdWZAUxcnsjJBKAFRthEgU9
+ /N2UkHezMx5hn2wa7K+Z8v29zmlwQaRJGVW0xzpzKlYrneAqm65o+aIEtWMiIUwz
+ zuCPN7tZcjxzOmwnLXupOclw2fE76vFzxc8g6pRH5bbpmNynB6SszCM8Gya2LUOV
+ OK8B7fThcx4XimWRE8jrBQHCaYEEqgXaLxUSrMV7s9yLbKRQI24YTWcIMY8pmp5t
+ /Viq6qqtiWZ7BwrtTR8KFzNEJcz5hC7LDaP1hJR4MEbRv8VPsyM3mxOWflmmR6jI
+ x8UBv585SLRiqNMFqJ+BryrBbiwtbm15z1jUyQTSGM8J+dULbLgDp1O1fQphXJKV
+ 7dbkV5CXPoglOK/jy3bj4zSfG0Fr068aBkXvkBlJ2X088Xxoj59mep+ZI0SJj4L1
+ vK6EecRxUHUQUltarj9A2LaHw5iqG5QD5px3O1wN4xqiuh95FuwmvIa2DNlclgca
+ 4xzWXUFwoJFoKLsaX8dkxC3Zs4YybSEpRntU8+ElQeAQoDB5gPgjvKoep+JHCSWZ
+ w+ZhNT+F5+tbRToPwyWn9k9lRBaDcDWQAOQtcoGxzZ0I4j0CWnC0uzfTJ48skhKk
+ xBo5sujuyZ3m0I/icdVY0hAt9Ok+3hB4hrvfMAT13zK0u+a917d6HKfeh95BNXAN
+ CzSUtC/J0VU1tk2cF71pJS6T1oTxm/+ptwPMclOiKqzgkxoZzITd08JLe6d50HCI
+ fw4LL7Z65HOE4kxHySxtCHWBw0d+44C+H9+g6SKWDNCUpv+xbc9VAMM6/rl8vW2G
+ bJHkSqxvx2mi3X3Ti4BLXNb7IWYjJVirTNtzYXbqgoPDotviuyoyB3v9bAGbg+gQ
+ KpzQJR+j+ODHITG9wJs14WtL/Ll3TO6Tz3XGfmgLiPs7N5oReNdQYrW5TadzttoQ
+ +WDKYoFXLXyHOT43BHRu+6V16Mpj/khdR7DFoj5AKbS3IoSuMiniowf2sztvtnsY
+ j7jwL3zaNv7qSf0p4TYo1HhXSggunaDRqBebpNVKAbHTU0ygiSizZAKIXb97/Gbp
+ rop1vSH0GNZWcV653vNFCKoSecVPwAA7LRQcW1RpyzE/NBdRLmh+rbONeh9FlJA4
+ JWpMK7RNA0JsaTy7Ti9/I7cYxUpAxP/6oHaH+P16bpoppyx6toH0Q94uXTU/Nlpl
+ PpipMYgTHN8SPwSBWUzIYQ==
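
The block above is the updated encrypted secrets file; the leading "U2FsdGVkX1" is the Base64 encoding of OpenSSL's "Salted__" magic, so the file appears to be openssl enc output. A minimal sketch of how the pipeline might decrypt and export it before running the deploy script; the cipher, key-derivation flags, file names and the SECRETS_PASSPHRASE variable are assumptions, not taken from this change:

# hypothetical decryption step; flags and names are assumptions
openssl enc -d -aes-256-cbc -pbkdf2 -a \
    -pass env:SECRETS_PASSPHRASE \
    -in secrets.enc -out secrets.env
set -a            # export everything sourced below
. ./secrets.env   # would provide e.g. de_hottis_app01_PGUSER, ..._PGPASSWORD, ..._MQTT_PASSWORD
set +a

With the variables exported this way, the ${!...} lookups in the deploy script resolve per namespace and instance.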

migration/.gitignore (new file)
View File

@@ -0,0 +1 @@
.venv

View File

@@ -0,0 +1,29 @@
import psycopg2
from loguru import logger

# srcConn is initialised to None so the finally block cannot fail if connect() raises
srcConn = None
try:
    srcConn = psycopg2.connect(database="level_monitoring_berresheim")
    srcConn.autocommit = False
    with srcConn.cursor() as srcCur:
        srcCur.execute("select time, application_name, raw_level, level, status, battery from measurement_t")
        for srcObj in srcCur:
            timestamp = srcObj[0]
            deviceName = srcObj[1]
            rawLevel = srcObj[2]
            level = srcObj[3]
            status = srcObj[4]
            battery = srcObj[5]
            logger.info(f"{timestamp=}, {deviceName=}, {rawLevel=}, {level=}, {status=}, {battery=}")
            destTime = timestamp
            destApplication = "de-hottis-level-monitoring"
            destDevice = "eui-a84041a2c18341d6"
            destAttributes = '{"ApplicationId":"de-hottis-level-monitoring", "DeviceType":"dragino-ldds75", "Hint": "Migrated"}'
            destValues = '{"Battery":{"unit":"V","label":"Battery","value":' + str(battery) + ',"variable":"Voltage"}, "Distance":{"unit":"mm","label":"Distance","variable":"Level","value":' + str(rawLevel) + '}, "CorrectedDistance":{"unit":"mm", "label":"CorrectedDistance", "variable":"Level","value":' + str(level) + '}}'
            logger.info(f"{destTime=}, {destApplication=}, {destDevice=}, {destAttributes=}, {destValues=}")
finally:
    if srcConn:
        srcConn.close()

View File

@@ -0,0 +1,2 @@
loguru==0.7.2
psycopg2==2.9.9
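
The requirements above pin the two dependencies of the migration helper. A minimal sketch of how it might be set up and run locally, assuming the script lives in migration/ (the .venv entry in the new .gitignore suggests a local virtualenv) and is called migrate.py (the actual file name is not shown in this compare view); connection details come from the standard libpq environment variables that psycopg2 honours:

cd migration
python3 -m venv .venv                 # matches the .venv ignore entry above
. .venv/bin/activate
pip install -r requirements.txt
# host, user and password are taken from the usual PG* variables:
PGHOST=localhost PGUSER=postgres PGPASSWORD=changeme python migrate.py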

View File

@@ -1,21 +0,0 @@
{
  "mqtt": {
    "broker": "mqtt://172.23.1.102:1883",
    "tlsEnable": "false"
  },
  "topicMappings": [
    {
      "topics": [ "ttn/#" ],
      "handler": "TTN",
      "id": "TTN0",
      "config": {
        "databaseConnStr": "",
        "attributes": {
        }
      }
    }
  ],
  "archiver": {
    "dir": "./tmp/udi"
  }
}

View File

@@ -1,8 +1,7 @@
{
  "mqtt": {
    "broker": "ssl://eu1.cloud.thethings.network:8883",
-   "username": "de-hottis-level-monitoring@ttn",
-   "passwordEnvVar": "MQTT_PASSWORD",
+   "username": "de-hottis-lora-test1@ttn",
    "tlsEnable": "true"
  },
  "topicMappings": [
@@ -11,7 +10,6 @@
      "handler": "TTN",
      "id": "TTN0",
      "config": {
-       "databaseConnStr": "",
        "attributes": {
        }
      }

View File

@@ -1,29 +0,0 @@
{
  "mqtt": {
    "broker": "mqtt://172.23.1.102:1883",
    "tlsEnable": "false"
  },
  "topicMappings": [
    {
      "topics": [ "v3/de-hottis-level-monitoring@ttn/devices/+/up" ],
      "handler": "TTN"
    }
  ],
  "handlers": [
    {
      "name": "TTN",
      "databaseConnStr": "",
      "attributes": {
      }
    },
    {
      "name": "IoT",
      "databaseConnStr": "",
      "attributes": {
      }
    }
  ],
  "archiver": {
    "dir": "./tmp/udi"
  }
}

View File

@@ -0,0 +1,63 @@
package draginoLse01

import (
    "fmt"
    "encoding/json"
    "udi/database"
)

/*
{
  "Bat":3.211,
  "TempC_DS18B20":"0.0",
  "conduct_SOIL":32,
  "temp_SOIL":"7.56",
  "water_SOIL":"25.92"
}
*/

type message struct {
    Bat           float32 `json:"Bat"`
    TempC_DS18B20 string  `json:"TempC_DS18B20"`
    Conduct_SOIL  int     `json:"conduct_SOIL"`
    Temp_SOIL     string  `json:"temp_SOIL"`
    Water_SOIL    string  `json:"water_SOIL"`
}

func Parse(fPort int, decodedPayload []byte, variables *map[string]database.VariableType, device *database.Device) error {
    if fPort != 2 {
        return fmt.Errorf("Unexpected fPort %d", fPort)
    }
    var message message
    err := json.Unmarshal(decodedPayload, &message)
    if err != nil {
        return fmt.Errorf("Unable to parse payload, fPort %d, error %s", fPort, err)
    }
    (*variables)["Battery"] = database.VariableType {
        Label: "Battery",
        Variable: "Voltage",
        Unit: "V",
        Value: message.Bat,
    }
    (*variables)["Conductance"] = database.VariableType {
        Label: "Conductance",
        Variable: "Conductance",
        Unit: "uS/cm",
        Value: message.Conduct_SOIL,
    }
    (*variables)["Temperature"] = database.VariableType {
        Label: "Temperature",
        Variable: "Temperature",
        Unit: "°C",
        Value: message.Temp_SOIL,
    }
    (*variables)["Water"] = database.VariableType {
        Label: "Water",
        Variable: "Water",
        Unit: "%",
        Value: message.Water_SOIL,
    }
    return nil
}

View File

@@ -2,6 +2,7 @@ package ttn
import (
    "fmt"
+   "log"
    "time"
    "encoding/json"
    "udi/config"
@@ -9,6 +10,7 @@ import (
    "udi/handlers/ttn/models/emuProfIILoRaCfg1"
    "udi/handlers/ttn/models/draginoLdds75"
    "udi/handlers/ttn/models/draginoLmds200"
+   "udi/handlers/ttn/models/draginoLse01"
    "udi/database"
)
@@ -83,7 +85,7 @@ func New(id string, config config.HandlerConfigT) handler.Handler {
}
func (self *TTNHandler) Handle(message handler.MessageT) {
-   // log.Printf("Handler TTN %d processing %s -> %s", self.id, message.Topic, message.Payload)
+   log.Printf("Handler TTN %d processing %s -> %s", self.Id, message.Topic, message.Payload)
    var measurement database.Measurement
    measurement.Time = time.Now()
@@ -94,7 +96,7 @@ func (self *TTNHandler) Handle(message handler.MessageT) {
        self.Lost("Error when unmarshaling message", err, message)
        return
    }
-   //log.Printf("Parsed message: %s", uplinkMessage)
+   log.Printf("Parsed message: %s", uplinkMessage)
    var attributes attributes
    attributes.DeviceId = uplinkMessage.EndDeviceIds.DeviceId
@@ -104,11 +106,11 @@ func (self *TTNHandler) Handle(message handler.MessageT) {
    attributes.FrmPayload = uplinkMessage.UplinkMessage.FrmPayload
    attributes.ConsumedAirtime = uplinkMessage.UplinkMessage.ConsumedAirtime
    for _, rxm := range uplinkMessage.UplinkMessage.RxMetadata {
-       //log.Printf("RXM: %s", rxm)
+       log.Printf("RXM: %s", rxm)
        g := gatewayAttributes { GatewayId: rxm.GatewayIds.GatewayId, Rssi: rxm.Rssi, Snr: rxm.Snr }
        attributes.Gateways = append(attributes.Gateways, g)
    }
-   //log.Printf("Attributes: %s", attributes)
+   log.Printf("Attributes: %s", attributes)
    measurement.Attributes = map[string]interface{} {
        "DeviceId": attributes.DeviceId,
        "ApplicationId": attributes.ApplicationId,
@@ -119,7 +121,7 @@ func (self *TTNHandler) Handle(message handler.MessageT) {
        "ConsumedAirtime": attributes.ConsumedAirtime,
    }
-   //log.Printf("ApplicationId: %s, DeviceId: %s", attributes.ApplicationId, attributes.DeviceId)
+   log.Printf("ApplicationId: %s, DeviceId: %s", attributes.ApplicationId, attributes.DeviceId)
    device, err2 := self.dbh.GetDeviceByLabelAndApplication(attributes.ApplicationId, attributes.DeviceId)
    if err2 != nil {
        self.Lost("Error when loading device", err2, message)
@@ -129,7 +131,7 @@ func (self *TTNHandler) Handle(message handler.MessageT) {
    measurement.Device = attributes.DeviceId
    measurement.Attributes["DeviceType"] = device.DeviceType.ModelIdentifier
-   //log.Printf("DeviceLabel: %s, DeviceType: %s", device.Label, device.DeviceType.ModelIdentifier)
+   log.Printf("DeviceLabel: %s, DeviceType: %s", device.Label, device.DeviceType.ModelIdentifier)
    var parser func(int, []byte, *map[string]database.VariableType, *database.Device) error
    switch device.DeviceType.ModelIdentifier {
@@ -139,6 +141,8 @@ func (self *TTNHandler) Handle(message handler.MessageT) {
        parser = draginoLdds75.Parse
    case "dragino-lmds200":
        parser = draginoLmds200.Parse
+   case "dragino-lse01":
+       parser = draginoLse01.Parse
    default:
        self.Lost(fmt.Sprintf("No parser found for %s", device.DeviceType.ModelIdentifier), nil, message)
        return
@@ -150,7 +154,7 @@ func (self *TTNHandler) Handle(message handler.MessageT) {
        self.Lost("Model parser failed", err3, message)
        return
    }
-   //log.Printf("Prepared measurement item: %s", measurement)
+   log.Printf("Prepared measurement item: %s", measurement)
    self.dbh.StoreMeasurement(&measurement)
    self.S()
}