Compare commits
13 Commits
73aaa2225d
44b7461d19
c54b335e5f
956d1bdcdb
b938d48c7f
b374b7f49d
879825a260
b6132afb11
5e94782575
57c63adeb2
e209598f9e
03f8f9fade
fc91a0da2e
@@ -35,9 +35,12 @@ for NAMESPACE_DIR in `find $INSTANCES_DIR -type d -mindepth 1 -maxdepth 1`; do
 INSTANCE=`basename $INSTANCE_DIR`
 echo "Instance: $INSTANCE"
 
+# set secret configuration from encrypted and decrypted file
+VARIABLE_PREFIX=`echo "$NAMESPACE""_""$INSTANCE" | tr - _`
+
+
 # set MQTT_PASSWORD as secret
-MQTT_PASSWORD_VARIABLE="$NAMESPACE""_""$INSTANCE""_MQTT_PASSWORD"
-MQTT_PASSWORD_VARIABLE=`echo $MQTT_PASSWORD_VARIABLE | tr - _`
+MQTT_PASSWORD_VARIABLE=$VARIABLE_PREFIX"_MQTT_PASSWORD"
 MQTT_PASSWORD="${!MQTT_PASSWORD_VARIABLE}"
 # echo "MQTT_PASSWORD_VARIABLE: $MQTT_PASSWORD_VARIABLE"
 # echo "MQTT_PASSWORD: $MQTT_PASSWORD"

@@ -46,27 +49,17 @@ for NAMESPACE_DIR in `find $INSTANCES_DIR -type d -mindepth 1 -maxdepth 1`; do
 --dry-run=client \
 -o yaml \
 --save-config | \
 kubectl apply -f - -n $NAMESPACE
 
-# set database configuration as secret
-## prepare configuration to access database to set udi database password
-PGUSER=`kubectl get secret -n database timescaledb -o jsonpath="{.data.superuser-username}" | base64 -d`
-PGHOST=`kubectl get services traefik -n system -o jsonpath="{.status.loadBalancer.ingress[0].ip}"`
-PGPASSWORD=`kubectl get secret -n database timescaledb -o jsonpath="{.data.superuser-password}" | base64 -d`
-PGSSLMODE=require
 
-NEW_UDI_DB_LOGIN="udi""-""$NAMESPACE""-""$INSTANCE"
-NEW_UDI_DB_PASSWORD=`tr -dc 'a-zA-Z0-9' < /dev/urandom | head -c 32`
-NEW_UDI_DB_DATABASE="udi""-""$NAMESPACE""-""$INSTANCE"
+LOGIN_VARIABLE=$VARIABLE_PREFIX"_PGUSER"
+NEW_UDI_DB_LOGIN="${!LOGIN_VARIABLE}"
+PASSWORD_VARIABLE=$VARIABLE_PREFIX"_PGPASSWORD"
+NEW_UDI_DB_PASSWORD="${!PASSWORD_VARIABLE}"
+DATABASE_VARIABLE=$VARIABLE_PREFIX"_PGDATABASE"
+NEW_UDI_DB_DATABASE="${!DATABASE_VARIABLE}"
 NEW_UDI_DB_HOST=timescaledb.database.svc.cluster.local
-
-DATABASE_MASTER_POD=`kubectl get pods -n database -l app=StackGresCluster -l role=master -o jsonpath='{.items[0].metadata.name}'`
-kubectl exec -i $DATABASE_MASTER_POD -c postgres-util -n database -- psql <<EOF
-BEGIN;
-ALTER USER "$NEW_UDI_DB_LOGIN" WITH PASSWORD '$NEW_UDI_DB_PASSWORD';
-COMMIT;
-EOF
 
 kubectl create secret generic $INSTANCE-udi-db-cred \
 --dry-run=client \
 -o yaml \
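Note: the script relies on Bash indirect expansion (`${!VAR}`) over variable names composed from namespace and instance, with dashes mapped to underscores by `tr`. A minimal Python sketch of that naming convention, assuming the per-instance secrets are exported as environment variables; the helper name and the example values are illustrative only:

```python
import os

def instance_secret(namespace: str, instance: str, suffix: str) -> str:
    # e.g. ("de-hottis", "level-monitoring", "MQTT_PASSWORD")
    # -> environment variable de_hottis_level_monitoring_MQTT_PASSWORD
    variable = f"{namespace}_{instance}_{suffix}".replace("-", "_")
    return os.environ[variable]
```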
@@ -1,15 +1,32 @@
-U2FsdGVkX18F3lXFVDFOS8Q3iGZsIPmMr9CvSshg1t4VTJ8fVlJ6LbFwT3SD1x4a
-Z2IdRTWT8wPG7Y9w4c3xbfFZywsJbxyrY+i3Lh7qvpsj7CXiWGOahdj5Pqu1UE3g
-3PiMpmfyeHKn30hRhupO6jL7kzxJEV8uov/sUh7H+76/R1h5oBBxJEhCl9nPrrf8
-7QaxFeJzogjHu+szvkOM5WylE/PuPv53OGF+7+6R5rK5v25pWlIhuUPg3CKk1qBg
-Ice6q2j5vQe1u+wEdJbxsvbwIdAQC8r6flPuwUeMfLe/Myiuo7v03Re45XKVCQAx
-H+i5Gh50KHqfoZliLeH0ShNSHPNh7zjuBF01o1/Y6KbKDga8G8FjGMn11kjkMCqX
-KxdF6Fp1up02n4UTk0+m1ekEc2bk+NZ71xb/iBdsZtyK9POj2ZAuxrjxJZH2FzQ+
-5lh/AgyH1LC8ms4KL5HjPLEIHQIkD/MJCnti/hQ7ZpoqFBRnTmRdv9w+pC/aXlmb
-en6I8Mv82qe1sdVlyoiTAqL1flfUjd+yICUE04g/M9NPYidhc+FZrsqyeVfSJYOh
-eDl4owAUjzJuDBkuFAX1PHwKglNcIx/rNaxF8ROSDyv59Qzzi84U3xmfm4JqGp2X
-2DAMT2Du8fP/cpRrwWQNtFN7a0FxOnmhDO6SAZoDisEeWR3r+2KsnR6ztKWgGWHK
-uTsb/g8fwx/KH2RsPukmiX7s8aTWcS5DyADNF8VgWKa3BU1icDlku+gMCINyHA9k
-iLR7qzBDXzZPLXj+tm7DjLnZYe2w2Ih7A3ZWsNI6Fp7wFFfqH3TYEM38VZWpzQmz
-gGa3vuIohn0Mt6JHeZ9zHc8wYruBhjeU16VHawP12jBSnFQnP/FeJq/WpUMW3YDY
-c8jsXokMCKUNubnzsmeQgiL0721IfB6+KdiGMucZZkQ=
+U2FsdGVkX1+DXC4uFXaRWr40xvTwUDMfmx3gZmixNJWP4djN5e5JZYmq2uWB/kQr
+0eCD4UM9cRnwyqCJudsOJnB8pT6XQgl/ZkSZavSOxG7r0uh90IqOe25nxWH3iiza
+oPWW0qR9KXB4qNQEAHkoww+dz7B2zFaDSQPgzm2oV9SWXfjhu0nDPcBO2e3gzSvU
+vCuwLnmG/4oacBgAeJHyys2NmW1e2ZnjbFOT+hMBtGPwwEIQ/mbq7IWrfiREUJ7B
+U1LoN4NPnkTtbFf63AkuQ6Lq0mkH6a6ZoVfkyg7kS9VIhznoDcZb29S/N6cGgoRG
+KFu1VbyjoCXnskZ/a9rw4e3Epau9qoeupmALlmcogOK3J0g1EhltmFMsfMnTDIkf
+Pj3t4+WZn0xozX89gLwNYYATALQfL+mAFRHpwx924Mh12tRzgSHyq+BHbcI5yjKA
+eNyP+gUT6nmrSGhlwBXsUzILsOCxxdhNsSd0h3S3huhuS+RivnXpGJQnR5vXmc8d
+iQE28Rx9YyrOM8+lpIL5mz7O7cSjEzwuIWLsnvs2nowYl8erWu7e+R9e2e8ulAgt
+UokWp2tOMFfvPU6bHJwDcre42Ozv8QefPQb9E8hcZe6U3ibpw715sV4YvSchxLCF
+cGzwgRSceKd+LOuoXOTfcUAvMzkOcV5/VRbX/ZbESt5ntw+g2AZqp3T+7iAnPmtx
+q2RN94Yes/8yKkvAV+wO6qBv7mCn79ZPYdf8C+eWVQv/uJ9BRmJMxy0FfUFz+kY0
++QL72T1BJwBRWkWmAmOuQSvQ6q+MtzekwuXfQgmcdtdWZAUxcnsjJBKAFRthEgU9
+/N2UkHezMx5hn2wa7K+Z8v29zmlwQaRJGVW0xzpzKlYrneAqm65o+aIEtWMiIUwz
+zuCPN7tZcjxzOmwnLXupOclw2fE76vFzxc8g6pRH5bbpmNynB6SszCM8Gya2LUOV
+OK8B7fThcx4XimWRE8jrBQHCaYEEqgXaLxUSrMV7s9yLbKRQI24YTWcIMY8pmp5t
+/Viq6qqtiWZ7BwrtTR8KFzNEJcz5hC7LDaP1hJR4MEbRv8VPsyM3mxOWflmmR6jI
+x8UBv585SLRiqNMFqJ+BryrBbiwtbm15z1jUyQTSGM8J+dULbLgDp1O1fQphXJKV
+7dbkV5CXPoglOK/jy3bj4zSfG0Fr068aBkXvkBlJ2X088Xxoj59mep+ZI0SJj4L1
+vK6EecRxUHUQUltarj9A2LaHw5iqG5QD5px3O1wN4xqiuh95FuwmvIa2DNlclgca
+4xzWXUFwoJFoKLsaX8dkxC3Zs4YybSEpRntU8+ElQeAQoDB5gPgjvKoep+JHCSWZ
+w+ZhNT+F5+tbRToPwyWn9k9lRBaDcDWQAOQtcoGxzZ0I4j0CWnC0uzfTJ48skhKk
+xBo5sujuyZ3m0I/icdVY0hAt9Ok+3hB4hrvfMAT13zK0u+a917d6HKfeh95BNXAN
+CzSUtC/J0VU1tk2cF71pJS6T1oTxm/+ptwPMclOiKqzgkxoZzITd08JLe6d50HCI
+fw4LL7Z65HOE4kxHySxtCHWBw0d+44C+H9+g6SKWDNCUpv+xbc9VAMM6/rl8vW2G
+bJHkSqxvx2mi3X3Ti4BLXNb7IWYjJVirTNtzYXbqgoPDotviuyoyB3v9bAGbg+gQ
+KpzQJR+j+ODHITG9wJs14WtL/Ll3TO6Tz3XGfmgLiPs7N5oReNdQYrW5TadzttoQ
++WDKYoFXLXyHOT43BHRu+6V16Mpj/khdR7DFoj5AKbS3IoSuMiniowf2sztvtnsY
+j7jwL3zaNv7qSf0p4TYo1HhXSggunaDRqBebpNVKAbHTU0ygiSizZAKIXb97/Gbp
+rop1vSH0GNZWcV653vNFCKoSecVPwAA7LRQcW1RpyzE/NBdRLmh+rbONeh9FlJA4
+JWpMK7RNA0JsaTy7Ti9/I7cYxUpAxP/6oHaH+P16bpoppyx6toH0Q94uXTU/Nlpl
+PpipMYgTHN8SPwSBWUzIYQ==
@@ -4,8 +4,10 @@ from loguru import logger
 try:
 srcConn = psycopg2.connect(database="level_monitoring_berresheim")
 srcConn.autocommit = False
+destConn = psycopg2.connect(database="udi-berresheim")
+destConn.autocommit = False
 
-with srcConn.cursor() as srcCur:
+with srcConn.cursor() as srcCur, destConn.cursor() as destCur:
 srcCur.execute("select time, application_name, raw_level, level, status, battery from measurement_t")
 for srcObj in srcCur:
 timestamp = srcObj[0]

@@ -20,10 +22,17 @@ try:
 destTime = timestamp
 destApplication = "de-hottis-level-monitoring"
 destDevice = "eui-a84041a2c18341d6"
-destAttributes = '{"ApplicationId":"de-hottis-level-monitoring", "DeviceType":"dragino-ldds75", "Hint": "Migrated"}'
-destValues = '{"Battery":{"unit":"V","label":"Battery","value":' + str(battery) + ',"variable":"Voltage"}, "Distance":{"unit":mm","label":"Distance","variable":"Level","value":' + str(rawLevel) + '}, "CorrectedDistance":{"unit":"mm", "label":"CorrectedDistance", "variable":"Level","value":' + str(level) + '}}'
+destAttributes = '{"ApplicationId":"de-hottis-level-monitoring", "DeviceType":"dragino-ldds75", "Status":"' + status + '","Hint": "Migrated"}'
+destValues = '{"Battery":{"unit":"V","label":"Battery","value":' + str(battery) + ',"variable":"Voltage"}, "Distance":{"unit":"mm","label":"Distance","variable":"Level","value":' + str(rawLevel) + '}, "CorrectedDistance":{"unit":"mm", "label":"CorrectedDistance", "variable":"Level","value":' + str(level) + '}}'
 logger.info(f"{destTime=}, {destApplication=}, {destDevice=}, {destAttributes=}, {destValues=}")
 
+destCur.execute("insert into measurements (time, application, device, attributes, values) values(%s, %s, %s, %s, %s)",
+(destTime, destApplication, destDevice, destAttributes, destValues))
+destConn.commit()
 finally:
 if srcConn:
 srcConn.close()
+if destConn:
+destConn.close()
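The destAttributes/destValues strings above are JSON assembled by string concatenation, which is exactly how the missing quote before "mm" slipped in (and is fixed in this hunk). A sketch of the same Battery/Distance/CorrectedDistance payload built with json.dumps instead; the helper is hypothetical, field names are taken from the diff:

```python
import json

def build_dest_values(battery, raw_level, level):
    # Serializes the same structure as destValues, with quoting handled by json.dumps
    return json.dumps({
        "Battery": {"unit": "V", "label": "Battery", "value": battery, "variable": "Voltage"},
        "Distance": {"unit": "mm", "label": "Distance", "variable": "Level", "value": raw_level},
        "CorrectedDistance": {"unit": "mm", "label": "CorrectedDistance", "variable": "Level", "value": level},
    })
```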
migration/migrate-pv.py (new file, 79 lines)
@@ -0,0 +1,79 @@
import psycopg2
from loguru import logger
import os

srcPgHost = os.environ["SRC_PGHOST"]
srcPgUser = os.environ["SRC_PGUSER"]
srcPgPassword = os.environ["SRC_PGPASSWORD"]
srcPgDatabase = os.environ["SRC_PGDATABASE"]
destPgHost = os.environ["DEST_PGHOST"]
destPgUser = os.environ["DEST_PGUSER"]
destPgPassword = os.environ["DEST_PGPASSWORD"]
destPgDatabase = os.environ["DEST_PGDATABASE"]

try:
    srcConn = psycopg2.connect(
        host=srcPgHost,
        dbname=srcPgDatabase,
        user=srcPgUser,
        password=srcPgPassword,
        sslmode='require'
    )
    srcConn.autocommit = False

    destConn = psycopg2.connect(
        host=destPgHost,
        dbname=destPgDatabase,
        user=destPgUser,
        password=destPgPassword,
        sslmode='require'
    )
    destConn.autocommit = False

    with srcConn.cursor() as srcCur, destConn.cursor() as destCur:
        srcCur.execute("select time, deviceid, status, state, importenergyactive, importenergyreactive, exportenergyactive, exportenergyreactive, powerapparent, poweractive, powerreactive, powerdemandpositive, powerdemandreverse, factor, angle, voltage, current, powerdemand from pv_power_measurement_t order by time")
        for srcObj in srcCur:
            timestamp = srcObj[0]
            deviceName = srcObj[1]
            status = srcObj[2]
            state = srcObj[3]
            importenergyactive = srcObj[4]
            importenergyreactive = srcObj[5]
            exportenergyactive = srcObj[6]
            exportenergyreactive = srcObj[7]
            powerapparent = srcObj[8]
            poweractive = srcObj[9]
            powerreactive = srcObj[10]
            powerdemandpositive = srcObj[11]
            powerdemandreverse = srcObj[12]
            factor = srcObj[13]
            angle = srcObj[14]
            voltage = srcObj[15]
            current = srcObj[16]
            powerdemand = srcObj[17]


            logger.info(f"{timestamp=}, {deviceName=}")

            destTime = timestamp
            destApplication = "PV"
            destDevice = "Powermeter"
            destAttributes = f"{{\"ApplicationId\":\"PV\", \"Status\":\"{status}\",\"Hint\": \"Migrated\"}}"
destValues = f"{{\"Cnt\": {{\"unit\": \"\", \"label\": \"\", \"value\": \"-1\", \"variable\": \"Cnt\"}}, \"Angle\": {{\"unit\": \"degree\", \"label\": \"\", \"value\": \"{angle}\", \"variable\": \"Angle\"}}, \"State\": {{\"unit\": \"\", \"label\": \"\", \"value\": \"{state}\", \"variable\": \"State\"}}, \"Factor\": {{\"unit\": \"\", \"label\": \"\", \"value\": \"{factor}\", \"variable\": \"Factor\"}}, \"Current\": {{\"unit\": \"A\", \"label\": \"\", \"value\": \"{current}\", \"variable\": \"Current\"}}, \"Voltage\": {{\"unit\": \"V\", \"label\": \"\", \"value\": \"{voltage}\", \"variable\": \"Voltage\"}}, \"PowerActive\": {{\"unit\": \"W\", \"label\": \"\", \"value\": \"{poweractive}\", \"variable\": \"PowerActive\"}}, \"PowerApparent\": {{\"unit\": \"VA\", \"label\": \"\", \"value\": \"{powerapparent}\", \"variable\": \"PowerApparent\"}}, \"PowerReactive\": {{\"unit\": \"VA\", \"label\": \"\", \"value\": \"{powerreactive}\", \"variable\": \"PowerReactive\"}}, \"ExportEnergyActive\": {{\"unit\": \"Wh\", \"label\": \"\", \"value\": \"{exportenergyactive}\", \"variable\": \"ExportEnergyActive\"}}, \"ImportEnergyActive\": {{\"unit\": \"Wh\", \"label\": \"\", \"value\": \"{importenergyactive}\", \"variable\": \"ImportEnergyActive\"}}, \"PowerDemandReverse\": {{\"unit\": \"W\", \"label\": \"\", \"value\": \"{powerdemandreverse}\", \"variable\": \"PowerDemandReverse\"}}, \"PowerDemandPositive\": {{\"unit\": \"W\", \"label\": \"\", \"value\": \"{powerdemandpositive}\", \"variable\": \"PowerDemandPositive\"}}, \"ExportEnergyReactive\": {{\"unit\": \"VAh\", \"label\": \"\", \"value\": \"{exportenergyreactive}\", \"variable\": \"ExportEnergyReactive\"}}, \"ImportEnergyReactive\": {{\"unit\": \"VAh\", \"label\": \"\", \"value\": \"{importenergyreactive}\", \"variable\": \"ImportEnergyReactive\"}}}}"
            logger.info(f"{destTime=}, {destApplication=}, {destDevice=}, {destAttributes=}, {destValues=}")


            try:
                destCur.execute("insert into measurements (time, application, device, attributes, values) values(%s, %s, %s, %s, %s)",
                    (destTime, destApplication, destDevice, destAttributes, destValues))
                destConn.commit()
            except Exception as e:
                destConn.rollback()
                logger.error(f"Error {e} when inserted time {destTime}")
finally:
    if srcConn:
        srcConn.close()
    if destConn:
        destConn.close()
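Both new migration scripts read their connection settings from SRC_*/DEST_* environment variables and open two psycopg2 connections with identical boilerplate. A possible shared helper, shown here only as a sketch (connect_from_env is not part of the repository):

```python
import os
import psycopg2

def connect_from_env(prefix: str):
    # prefix is "SRC" or "DEST"; expects <prefix>_PGHOST, _PGDATABASE, _PGUSER, _PGPASSWORD
    conn = psycopg2.connect(
        host=os.environ[f"{prefix}_PGHOST"],
        dbname=os.environ[f"{prefix}_PGDATABASE"],
        user=os.environ[f"{prefix}_PGUSER"],
        password=os.environ[f"{prefix}_PGPASSWORD"],
        sslmode="require",
    )
    conn.autocommit = False
    return conn
```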
migration/migrate-temperature.py (new file, 78 lines)
@@ -0,0 +1,78 @@
import psycopg2
from loguru import logger
import os

srcPgHost = os.environ["SRC_PGHOST"]
srcPgUser = os.environ["SRC_PGUSER"]
srcPgPassword = os.environ["SRC_PGPASSWORD"]
srcPgDatabase = os.environ["SRC_PGDATABASE"]
destPgHost = os.environ["DEST_PGHOST"]
destPgUser = os.environ["DEST_PGUSER"]
destPgPassword = os.environ["DEST_PGPASSWORD"]
destPgDatabase = os.environ["DEST_PGDATABASE"]

try:
    srcConn = psycopg2.connect(
        host=srcPgHost,
        dbname=srcPgDatabase,
        user=srcPgUser,
        password=srcPgPassword,
        sslmode='require'
    )
    srcConn.autocommit = False

    destConn = psycopg2.connect(
        host=destPgHost,
        dbname=destPgDatabase,
        user=destPgUser,
        password=destPgPassword,
        sslmode='require'
    )
    destConn.autocommit = False

    with srcConn.cursor() as srcCur, destConn.cursor() as destCur:
        srcCur.execute("select time, location, status, temperature, category from room_climate_measurement_t where category = 'heating' and time > '2023-12-19 05:20:00' order by time")
        for srcObj in srcCur:
            timestamp = srcObj[0]
            location = srcObj[1]
            status = srcObj[2]
            temperature = srcObj[3]
            category = srcObj[4]

            logger.info(f"{timestamp=}, {location=}, {status=}, {temperature=}, {category=}")

            destTime = timestamp

            match category:
                case 'heating':
                    destApplication = 'Temperature Heating'
                case 'Outdoor':
                    destApplication = 'Temperature Wago'
                case 'Device':
                    destApplication = 'Temperature Wago'
                case 'Indoor':
                    destApplication = 'Temperature Multisensor' if location != 'Anna-Koeln-2' else 'Temperature Shelly Plus HT'
                case 'Special':
                    destApplication = 'Temperature Multisensor'

            destDevice = location
            destAttributes = '{"ApplicationId":"temperature-imported", "Status":"' + status + '","Location":"' + location + '","Category":"' + category + '","Hint": "Migrated"}'
            destValues = '{"Value": {"unit": "°C", "label": "", "value": "' + str(temperature) + '", "variable": ""}}'

            logger.info(f"{destTime=}, {destApplication=}, {destDevice=}, {destAttributes=}, {destValues=}")

            try:
                destCur.execute("insert into measurements (time, application, device, attributes, values) values(%s, %s, %s, %s, %s)",
                    (destTime, destApplication, destDevice, destAttributes, destValues))
                destConn.commit()
            except Exception as e:
                destConn.rollback()
                logger.error(f"Error {e} when inserted time {destTime}")

finally:
    if srcConn:
        srcConn.close()

    if destConn:
        destConn.close()
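The match statement above needs Python 3.10 or newer; the same category-to-application mapping can also be written as a dictionary lookup. A sketch using the strings from the script (the helper name is illustrative):

```python
APPLICATION_BY_CATEGORY = {
    "heating": "Temperature Heating",
    "Outdoor": "Temperature Wago",
    "Device": "Temperature Wago",
    "Special": "Temperature Multisensor",
}

def application_for(category: str, location: str) -> str:
    if category == "Indoor":
        # same special case as the corresponding match arm above
        return "Temperature Multisensor" if location != "Anna-Koeln-2" else "Temperature Shelly Plus HT"
    return APPLICATION_BY_CATEGORY[category]
```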
queries/berresheim.sql (new file, 8 lines)
@@ -0,0 +1,8 @@
create or replace view level_v as
select time,
       cast(values->'CorrectedDistance'->>'value' as float) as level,
       cast(values->'Battery'->>'value' as float) as battery,
       attributes->>'Status' as status,
       device
from measurements
where application = 'de-hottis-level-monitoring';
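level_v lifts typed columns out of the JSON values/attributes stored in the generic measurements table. Read back through psycopg2 this would look roughly as follows (a sketch; the database name udi-berresheim is taken from the migration script above):

```python
import psycopg2

conn = psycopg2.connect(dbname="udi-berresheim")
with conn.cursor() as cur:
    cur.execute("select time, level, battery, status, device from level_v order by time desc limit 5")
    for time, level, battery, status, device in cur:
        print(time, level, battery, status, device)
conn.close()
```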
@@ -45,3 +45,25 @@ create or replace view temperature_v as
 from measurements
 where application in ('Temperature Multisensor', 'Temperature Shelly Plus HT');
 
+create or replace view temperature2_v as
+select time,
+cast(values->'Value'->>'value' as float) as temperature,
+device
+from measurements
+where application = 'Temperature Wago';
+
+create or replace view humidity_v as
+select time,
+cast(values->'Value'->>'value' as float) as humidity,
+device
+from measurements
+where application in ('Humidity Multisensor');
+
+create or replace view soil_v as
+select time,
+cast(values->'Water'->>'value' as float) as water,
+cast(values->'Conductance'->>'value' as float) as conductance,
+cast(values->'Temperature'->>'value' as float) as temperature,
+device
+from measurements
+where application = 'de-hottis-app01' and attributes->>'DeviceType' = 'dragino-lse01';
queries/old-daily-temperature-query.sql (new file, 11 lines)
@@ -0,0 +1,11 @@
select
    extract('day' from time)::varchar || '.' || extract('month' from time)::varchar || '.' || extract('year' from time)::varchar as day,
    avg(temperature)::numeric(10,0) as temperature
from room_climate_measurement_t
where
    category = 'Outdoor' and
    location = 'Outdoor' and
    extract('hour' from time) = 12 and
    time::date = now()::date
group by day
queries/old-pv-yield-query.sql (new file, 73 lines)
@@ -0,0 +1,73 @@
-- query

with
first_day_in_year as (
    select
        date_trunc('day', min(time)) as day
    from pv_power_measurement_t
    where
        time between date_trunc('year', time) and now()
),
first_value_in_year as (
    select
        time_bucket('1 day', time) as interval,
        first(exportenergyactive, time) as energy
    from pv_power_measurement_t
    where
        time between (select day from first_day_in_year) and (select day from first_day_in_year) + interval '1 day' and
        status = 'Ok'
    group by interval
),
first_day_in_month as (
    select
        date_trunc('day', min(time)) as day
    from pv_power_measurement_t
    where
        time between date_trunc('month', now()) and now()
),
first_value_in_month as (
    select
        time_bucket('1 day', time) as interval,
        first(exportenergyactive, time) as energy
    from pv_power_measurement_t
    where
        time between (select day from first_day_in_month) and (select day from first_day_in_month) + interval '1 day' and
        status = 'Ok'
    group by interval
),
first_value_in_day as (
    select
        time_bucket('1 day', time) as interval,
        first(exportenergyactive, time) as energy
    from pv_power_measurement_t
    where time >= date_trunc('day', now())
    group by interval
),
last_value as (
    select
        time_bucket('1 day', time) as interval,
        last(exportenergyactive, time) as energy
    from pv_power_measurement_t
    where
        time between date_trunc('day', now()) and date_trunc('day', now()) + interval '1 day' and
        status = 'Ok'
    group by interval
)
select
    extract(year from (select day from first_day_in_year))::text as period_value,
    'Year' as period_name,
    round(((select energy from last_value) - (select energy from first_value_in_year))::numeric, 2) as yield
union
select
    to_char((select day from first_day_in_month), 'Month') as period_value,
    'Month' as period_name,
    round(((select energy from last_value) - (select energy from first_value_in_month))::numeric, 2) as yield
union
select
    now()::date::text as period_value,
    'Day' as period_name,
    round(((select energy from last_value) - (select energy from first_value_in_day))::numeric, 2) as yield;

-- output format
-- wn@atuin:~/Workspace/go-workspace/src/universal-data-ingest [main ≡ +0 ~1 -0 !]$ mosquitto_sub -h 172.23.1.102 -v -t IoT/PV/Yields
-- IoT/PV/Yields {"Month":"1.43","Year":"285.39","Day":"0.00"}
@@ -3,13 +3,7 @@ create or replace view power_v as
 cast(values->'ActivePowerL1'->>'value' as float) as power_l1,
 cast(values->'ActivePowerL2'->>'value' as float) as power_l2,
 cast(values->'ActivePowerL3'->>'value' as float) as power_l3,
-device
-from measurements
-where application = 'com-passavant-geiger-poc' and
-attributes->>'FPort' = '1';
-
-create or replace view power_factor_v as
-select time,
+cast(values->'ActivePowerL123'->>'value' as float) as power_total,
 cast(values->'PowerfactorL1'->>'value' as float) as factor_l1,
 cast(values->'PowerfactorL2'->>'value' as float) as factor_l2,
 cast(values->'PowerfactorL3'->>'value' as float) as factor_l3,
@@ -2,9 +2,13 @@ if [ "$1" = "" ]; then
 echo "set namespace as argument"
 fi
 N=$1
+if [ "$2" = "" ]; then
+echo "set instance as argument"
+fi
+I=$2
 PGHOST=`kubectl get services traefik -n system -o jsonpath="{.status.loadBalancer.ingress[0].ip}"`
-PGPASSWORD=`kubectl get secrets udi-db-cred -n $N -o jsonpath="{.data.PGPASSWORD}" | base64 --decode`
-PGUSER=`kubectl get secrets udi-db-cred -n $N -o jsonpath="{.data.PGUSER}" | base64 --decode`
-PGSSLMODE=`kubectl get secrets udi-db-cred -n $N -o jsonpath="{.data.PGSSLMODE}" | base64 --decode`
-PGDATABASE=`kubectl get secrets udi-db-cred -n $N -o jsonpath="{.data.PGDATABASE}" | base64 --decode`
+PGPASSWORD=`kubectl get secrets $I-udi-db-cred -n $N -o jsonpath="{.data.PGPASSWORD}" | base64 --decode`
+PGUSER=`kubectl get secrets $I-udi-db-cred -n $N -o jsonpath="{.data.PGUSER}" | base64 --decode`
+PGSSLMODE=`kubectl get secrets $I-udi-db-cred -n $N -o jsonpath="{.data.PGSSLMODE}" | base64 --decode`
+PGDATABASE=`kubectl get secrets $I-udi-db-cred -n $N -o jsonpath="{.data.PGDATABASE}" | base64 --decode`
 export PGUSER PGHOST PGPASSWORD PGSSLMODE PGDATABASE
@@ -2,7 +2,7 @@ package draginoLdds75
 
 import (
 "fmt"
-"log"
+// "log"
 "strings"
 "strconv"
 "encoding/json"

@@ -26,7 +26,7 @@ type message struct {
 TempC_DS18B20 string `json:"TempC_DS18B20"`
 }
 
-func Parse(fPort int, decodedPayload []byte, variables *map[string]database.VariableType, device *database.Device) error {
+func Parse(fPort int, decodedPayload []byte, _ string, variables *map[string]database.VariableType, attributes *map[string]interface{}, device *database.Device) error {
 if fPort != 2 {
 return fmt.Errorf("Unexpected fPort %d", fPort)
 }

@@ -55,11 +55,20 @@ func Parse(fPort int, decodedPayload []byte, variables *map[string]database.Vari
 Unit: "mm",
 Value: distance,
 }
+
+if distance == 20 {
+(*attributes)["Status"] = "invalid value"
+} else if distance == 0 {
+(*attributes)["Status"] = "no sensor detected"
+} else {
+(*attributes)["Status"] = "Ok"
+}
+
 groundLevelI, exists := device.Attributes["GroundLevel"]
 groundLevelS, ok := groundLevelI.(string)
 groundLevel, err3 := strconv.Atoi(groundLevelS)
 if exists && err3 == nil && ok {
-log.Println("add corrected distance")
+//log.Println("add corrected distance")
 correctedDistance := groundLevel - distance
 (*variables)["CorrectedDistance"] = database.VariableType {
 Label: "CorrectedDistance",

@@ -67,11 +76,11 @@ func Parse(fPort int, decodedPayload []byte, variables *map[string]database.Vari
 Unit: "mm",
 Value: correctedDistance,
 }
-} else {
+} /* else {
 log.Printf("no ground level: %s %s %s", exists, err3, ok)
 log.Printf("Device: %s", device)
 log.Printf("Attributes: %s", device.Attributes)
-}
+} */
 
 return nil
 }
@@ -26,7 +26,7 @@ type message struct {
 Dis2 int `json:"dis2"`
 }
 
-func Parse(fPort int, decodedPayload []byte, variables *map[string]database.VariableType, device *database.Device) error {
+func Parse(fPort int, decodedPayload []byte, _ string, variables *map[string]database.VariableType, attributes *map[string]interface{}, device *database.Device) error {
 if fPort != 2 {
 return fmt.Errorf("Unexpected fPort %d", fPort)
 }

@@ -55,6 +55,15 @@ func Parse(fPort int, decodedPayload []byte, variables *map[string]database.Vari
 Unit: "mm",
 Value: distance2,
 }
+
+if distance1 == 2 {
+(*attributes)["Status"] = "invalid value"
+} else if distance1 == 1 {
+(*attributes)["Status"] = "no sensor detected"
+} else {
+(*attributes)["Status"] = "Ok"
+}
+
 groundLevelI, exists := device.Attributes["GroundLevel"]
 groundLevelS, ok := groundLevelI.(string)
 groundLevel, err3 := strconv.Atoi(groundLevelS)
@@ -24,7 +24,7 @@ type message struct {
 Water_SOIL string `json:"water_SOIL"`
 }
 
-func Parse(fPort int, decodedPayload []byte, variables *map[string]database.VariableType, device *database.Device) error {
+func Parse(fPort int, decodedPayload []byte, _ string, variables *map[string]database.VariableType, _ *map[string]interface{}, _ *database.Device) error {
 if fPort != 2 {
 return fmt.Errorf("Unexpected fPort %d", fPort)
 }

@@ -177,7 +177,7 @@ type emuMessage1 struct {
 
 
 
-func Parse(fPort int, decodedPayload []byte, variables *map[string]database.VariableType, _ *database.Device) error {
+func Parse(fPort int, decodedPayload []byte, _ string, variables *map[string]database.VariableType, _ *map[string]interface{}, _ *database.Device) error {
 //log.Printf("Parse input: %d, %s", fPort, decodedPayload)
 switch fPort {
 case 1:
src/udi/handlers/ttn/models/hottisScd30/hottisScd30.go (new file, 38 lines)
@@ -0,0 +1,38 @@
package hottisScd30

import (
    "log"
    "fmt"
    "bytes"
    "encoding/base64"
    "encoding/binary"
    "udi/database"
)

type hottisScd30Values struct {
    Status uint8
    CO2Conc int32
    Temp int32
    Hum int32
}


func Parse(fPort int, _ []byte, frmPayload string, variables *map[string]database.VariableType, _ *map[string]interface{}, _ *database.Device) error {
    if fPort != 2 {
        return fmt.Errorf("Unexpected fPort %d", fPort)
    }

    b, err := base64.StdEncoding.DecodeString(frmPayload)
    if err != nil {
        return fmt.Errorf("Unable to base64-decode payload: %v", err)
    }

    var values hottisScd30Values
    err = binary.Read(bytes.NewReader(b), binary.LittleEndian, &values)
    if err != nil {
        return fmt.Errorf("Unable to cast into struct: %v", err)
    }
    log.Printf("CO2: %d, Temp: %d, Hum: %d, Status: %d", values.CO2Conc, values.Temp, values.Hum, values.Status)

    return nil
}
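binary.Read fills the struct field by field in little-endian order, so a hottis-scd30 uplink is 13 bytes: one status byte followed by three int32 values. The same frame can be unpacked in Python for testing (a sketch; the helper name is illustrative and any example payload would have to match that layout):

```python
import base64
import struct

def decode_hottis_scd30(frm_payload: str) -> dict:
    raw = base64.b64decode(frm_payload)
    # "<Biii": little-endian, no padding -> uint8 Status, int32 CO2Conc, Temp, Hum (13 bytes)
    status, co2, temp, hum = struct.unpack("<Biii", raw)
    return {"Status": status, "CO2Conc": co2, "Temp": temp, "Hum": hum}
```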
@@ -0,0 +1,25 @@
package rawPayloadPrinter

import (
    "log"
    "fmt"
    "encoding/base64"
    "encoding/hex"
    "udi/database"
)


func Parse(fPort int, _ []byte, frmPayload string, variables *map[string]database.VariableType, _ *map[string]interface{}, _ *database.Device) error {
    if fPort != 2 {
        return fmt.Errorf("Unexpected fPort %d", fPort)
    }

    bytes, err := base64.StdEncoding.DecodeString(frmPayload)
    if err != nil {
        return fmt.Errorf("Unable to base64-decode payload: %v", err)
    }
    hexString := hex.EncodeToString(bytes)

    log.Printf("Payload: %s", hexString)
    return nil
}
@@ -2,7 +2,7 @@ package ttn
 
 import (
 "fmt"
-"log"
+//"log"
 "time"
 "encoding/json"
 "udi/config"

@@ -11,6 +11,8 @@ import (
 "udi/handlers/ttn/models/draginoLdds75"
 "udi/handlers/ttn/models/draginoLmds200"
 "udi/handlers/ttn/models/draginoLse01"
+"udi/handlers/ttn/models/rawPayloadPrinter"
+"udi/handlers/ttn/models/hottisScd30"
 "udi/database"
 )
 

@@ -85,7 +87,7 @@ func New(id string, config config.HandlerConfigT) handler.Handler {
 }
 
 func (self *TTNHandler) Handle(message handler.MessageT) {
-log.Printf("Handler TTN %d processing %s -> %s", self.Id, message.Topic, message.Payload)
+//log.Printf("Handler TTN %d processing %s -> %s", self.Id, message.Topic, message.Payload)
 
 var measurement database.Measurement
 measurement.Time = time.Now()

@@ -96,7 +98,7 @@ func (self *TTNHandler) Handle(message handler.MessageT) {
 self.Lost("Error when unmarshaling message", err, message)
 return
 }
-log.Printf("Parsed message: %s", uplinkMessage)
+//log.Printf("Parsed message: %s", uplinkMessage)
 
 var attributes attributes
 attributes.DeviceId = uplinkMessage.EndDeviceIds.DeviceId

@@ -106,11 +108,11 @@ func (self *TTNHandler) Handle(message handler.MessageT) {
 attributes.FrmPayload = uplinkMessage.UplinkMessage.FrmPayload
 attributes.ConsumedAirtime = uplinkMessage.UplinkMessage.ConsumedAirtime
 for _, rxm := range uplinkMessage.UplinkMessage.RxMetadata {
-log.Printf("RXM: %s", rxm)
+//log.Printf("RXM: %s", rxm)
 g := gatewayAttributes { GatewayId: rxm.GatewayIds.GatewayId, Rssi: rxm.Rssi, Snr: rxm.Snr }
 attributes.Gateways = append(attributes.Gateways, g)
 }
-log.Printf("Attributes: %s", attributes)
+//log.Printf("Attributes: %s", attributes)
 measurement.Attributes = map[string]interface{} {
 "DeviceId": attributes.DeviceId,
 "ApplicationId": attributes.ApplicationId,

@@ -121,7 +123,7 @@ func (self *TTNHandler) Handle(message handler.MessageT) {
 "ConsumedAirtime": attributes.ConsumedAirtime,
 }
 
-log.Printf("ApplicationId: %s, DeviceId: %s", attributes.ApplicationId, attributes.DeviceId)
+//log.Printf("ApplicationId: %s, DeviceId: %s", attributes.ApplicationId, attributes.DeviceId)
 device, err2 := self.dbh.GetDeviceByLabelAndApplication(attributes.ApplicationId, attributes.DeviceId)
 if err2 != nil {
 self.Lost("Error when loading device", err2, message)

@@ -131,9 +133,9 @@ func (self *TTNHandler) Handle(message handler.MessageT) {
 measurement.Device = attributes.DeviceId
 measurement.Attributes["DeviceType"] = device.DeviceType.ModelIdentifier
 
-log.Printf("DeviceLabel: %s, DeviceType: %s", device.Label, device.DeviceType.ModelIdentifier)
+//log.Printf("DeviceLabel: %s, DeviceType: %s", device.Label, device.DeviceType.ModelIdentifier)
 
-var parser func(int, []byte, *map[string]database.VariableType, *database.Device) error
+var parser func(int, []byte, string, *map[string]database.VariableType, *map[string]interface{}, *database.Device) error
 switch device.DeviceType.ModelIdentifier {
 case "emu-prof-ii-lora-cfg1":
 parser = emuProfIILoRaCfg1.Parse

@@ -143,18 +145,27 @@ func (self *TTNHandler) Handle(message handler.MessageT) {
 parser = draginoLmds200.Parse
 case "dragino-lse01":
 parser = draginoLse01.Parse
+case "raw-payload-printer":
+parser = rawPayloadPrinter.Parse
+case "hottis-scd30":
+parser = hottisScd30.Parse
 default:
 self.Lost(fmt.Sprintf("No parser found for %s", device.DeviceType.ModelIdentifier), nil, message)
 return
 }
 
 measurement.Values = make(map[string]database.VariableType)
-err3 := parser(uplinkMessage.UplinkMessage.FPort, uplinkMessage.UplinkMessage.DecodedPayload.Payload, &(measurement.Values), device)
+err3 := parser(uplinkMessage.UplinkMessage.FPort,
+uplinkMessage.UplinkMessage.DecodedPayload.Payload,
+uplinkMessage.UplinkMessage.FrmPayload,
+&(measurement.Values),
+&(measurement.Attributes),
+device)
 if err3 != nil {
 self.Lost("Model parser failed", err3, message)
 return
 }
-log.Printf("Prepared measurement item: %s", measurement)
+//log.Printf("Prepared measurement item: %s", measurement)
 self.dbh.StoreMeasurement(&measurement)
 self.S()
 }