Compare commits
11 Commits
f44664eaad
15458b9955
f55990cc57
766355f85d
73aaa2225d
44b7461d19
c54b335e5f
956d1bdcdb
b938d48c7f
b374b7f49d
879825a260
@@ -4,8 +4,10 @@ from loguru import logger
try:
    srcConn = psycopg2.connect(database="level_monitoring_berresheim")
    srcConn.autocommit = False
+    destConn = psycopg2.connect(database="udi-berresheim")
+    destConn.autocommit = False

-    with srcConn.cursor() as srcCur:
+    with srcConn.cursor() as srcCur, destConn.cursor() as destCur:
        srcCur.execute("select time, application_name, raw_level, level, status, battery from measurement_t")
        for srcObj in srcCur:
            timestamp = srcObj[0]
@@ -20,10 +22,17 @@ try:
            destTime = timestamp
            destApplication = "de-hottis-level-monitoring"
            destDevice = "eui-a84041a2c18341d6"
-            destAttributes = '{"ApplicationId":"de-hottis-level-monitoring", "DeviceType":"dragino-ldds75", "Hint": "Migrated"}'
-            destValues = '{"Battery":{"unit":"V","label":"Battery","value":' + str(battery) + ',"variable":"Voltage"}, "Distance":{"unit":mm","label":"Distance","variable":"Level","value":' + str(rawLevel) + '}, "CorrectedDistance":{"unit":"mm", "label":"CorrectedDistance", "variable":"Level","value":' + str(level) + '}}'
+            destAttributes = '{"ApplicationId":"de-hottis-level-monitoring", "DeviceType":"dragino-ldds75", "Status":"' + status + '","Hint": "Migrated"}'
+            destValues = '{"Battery":{"unit":"V","label":"Battery","value":' + str(battery) + ',"variable":"Voltage"}, "Distance":{"unit":"mm","label":"Distance","variable":"Level","value":' + str(rawLevel) + '}, "CorrectedDistance":{"unit":"mm", "label":"CorrectedDistance", "variable":"Level","value":' + str(level) + '}}'
            logger.info(f"{destTime=}, {destApplication=}, {destDevice=}, {destAttributes=}, {destValues=}")

+            destCur.execute("insert into measurements (time, application, device, attributes, values) values(%s, %s, %s, %s, %s)",
+                (destTime, destApplication, destDevice, destAttributes, destValues))
+            destConn.commit()
finally:
    if srcConn:
        srcConn.close()
+    if destConn:
+        destConn.close()
migration/migrate-pv.py (new file, 79 lines)
@@ -0,0 +1,79 @@
import psycopg2
from loguru import logger
import os

srcPgHost = os.environ["SRC_PGHOST"]
srcPgUser = os.environ["SRC_PGUSER"]
srcPgPassword = os.environ["SRC_PGPASSWORD"]
srcPgDatabase = os.environ["SRC_PGDATABASE"]
destPgHost = os.environ["DEST_PGHOST"]
destPgUser = os.environ["DEST_PGUSER"]
destPgPassword = os.environ["DEST_PGPASSWORD"]
destPgDatabase = os.environ["DEST_PGDATABASE"]

try:
    srcConn = psycopg2.connect(
        host=srcPgHost,
        dbname=srcPgDatabase,
        user=srcPgUser,
        password=srcPgPassword,
        sslmode='require'
    )
    srcConn.autocommit = False

    destConn = psycopg2.connect(
        host=destPgHost,
        dbname=destPgDatabase,
        user=destPgUser,
        password=destPgPassword,
        sslmode='require'
    )
    destConn.autocommit = False

    with srcConn.cursor() as srcCur, destConn.cursor() as destCur:
        srcCur.execute("select time, deviceid, status, state, importenergyactive, importenergyreactive, exportenergyactive, exportenergyreactive, powerapparent, poweractive, powerreactive, powerdemandpositive, powerdemandreverse, factor, angle, voltage, current, powerdemand from pv_power_measurement_t order by time")
        for srcObj in srcCur:
            timestamp = srcObj[0]
            deviceName = srcObj[1]
            status = srcObj[2]
            state = srcObj[3]
            importenergyactive = srcObj[4]
            importenergyreactive = srcObj[5]
            exportenergyactive = srcObj[6]
            exportenergyreactive = srcObj[7]
            powerapparent = srcObj[8]
            poweractive = srcObj[9]
            powerreactive = srcObj[10]
            powerdemandpositive = srcObj[11]
            powerdemandreverse = srcObj[12]
            factor = srcObj[13]
            angle = srcObj[14]
            voltage = srcObj[15]
            current = srcObj[16]
            powerdemand = srcObj[17]

            logger.info(f"{timestamp=}, {deviceName=}")

            destTime = timestamp
            destApplication = "PV"
            destDevice = "Powermeter"
            destAttributes = f"{{\"ApplicationId\":\"PV\", \"Status\":\"{status}\",\"Hint\": \"Migrated\"}}"
            destValues = f"{{\"Cnt\": {{\"unit\": \"\", \"label\": \"\", \"value\": \"-1\", \"variable\": \"Cnt\"}}, \"Angle\": {{\"unit\": \"degree\", \"label\": \"\", \"value\": \"{angle}\", \"variable\": \"Angle\"}}, \"State\": {{\"unit\": \"\", \"label\": \"\", \"value\": \"{state}\", \"variable\": \"State\"}}, \"Factor\": {{\"unit\": \"\", \"label\": \"\", \"value\": \"{factor}\", \"variable\": \"Factor\"}}, \"Current\": {{\"unit\": \"A\", \"label\": \"\", \"value\": \"{current}\", \"variable\": \"Current\"}}, \"Voltage\": {{\"unit\": \"V\", \"label\": \"\", \"value\": \"{voltage}\", \"variable\": \"Voltage\"}}, \"PowerActive\": {{\"unit\": \"W\", \"label\": \"\", \"value\": \"{poweractive}\", \"variable\": \"PowerActive\"}}, \"PowerApparent\": {{\"unit\": \"VA\", \"label\": \"\", \"value\": \"{powerapparent}\", \"variable\": \"PowerApparent\"}}, \"PowerReactive\": {{\"unit\": \"VA\", \"label\": \"\", \"value\": \"{powerreactive}\", \"variable\": \"PowerReactive\"}}, \"ExportEnergyActive\": {{\"unit\": \"Wh\", \"label\": \"\", \"value\": \"{exportenergyactive}\", \"variable\": \"ExportEnergyActive\"}}, \"ImportEnergyActive\": {{\"unit\": \"Wh\", \"label\": \"\", \"value\": \"{importenergyactive}\", \"variable\": \"ImportEnergyActive\"}}, \"PowerDemandReverse\": {{\"unit\": \"W\", \"label\": \"\", \"value\": \"{powerdemandreverse}\", \"variable\": \"PowerDemandReverse\"}}, \"PowerDemandPositive\": {{\"unit\": \"W\", \"label\": \"\", \"value\": \"{powerdemandpositive}\", \"variable\": \"PowerDemandPositive\"}}, \"ExportEnergyReactive\": {{\"unit\": \"VAh\", \"label\": \"\", \"value\": \"{exportenergyreactive}\", \"variable\": \"ExportEnergyReactive\"}}, \"ImportEnergyReactive\": {{\"unit\": \"VAh\", \"label\": \"\", \"value\": \"{importenergyreactive}\", \"variable\": \"ImportEnergyReactive\"}}}}"
            logger.info(f"{destTime=}, {destApplication=}, {destDevice=}, {destAttributes=}, {destValues=}")

            try:
                destCur.execute("insert into measurements (time, application, device, attributes, values) values(%s, %s, %s, %s, %s)",
                    (destTime, destApplication, destDevice, destAttributes, destValues))
                destConn.commit()
            except Exception as e:
                destConn.rollback()
                logger.error(f"Error {e} when inserted time {destTime}")
finally:
    if srcConn:
        srcConn.close()
    if destConn:
        destConn.close()
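After running migrate-pv.py, the migrated rows can be spot-checked from the destination database. The sketch below is not part of the commit; it only assumes the same DEST_* environment variables and the measurements table and attribute keys used by the insert above.

import os
import psycopg2

# Connect with the same DEST_* settings the migration script expects.
conn = psycopg2.connect(
    host=os.environ["DEST_PGHOST"],
    dbname=os.environ["DEST_PGDATABASE"],
    user=os.environ["DEST_PGUSER"],
    password=os.environ["DEST_PGPASSWORD"],
    sslmode="require"
)
with conn.cursor() as cur:
    # Show a few of the most recently migrated PV rows.
    cur.execute(
        "select time, device, attributes->>'Status' "
        "from measurements "
        "where application = 'PV' and attributes->>'Hint' = 'Migrated' "
        "order by time desc limit 5"
    )
    for row in cur:
        print(row)
conn.close()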
migration/migrate-temperature.py (new file, 78 lines)
@@ -0,0 +1,78 @@
import psycopg2
from loguru import logger
import os

srcPgHost = os.environ["SRC_PGHOST"]
srcPgUser = os.environ["SRC_PGUSER"]
srcPgPassword = os.environ["SRC_PGPASSWORD"]
srcPgDatabase = os.environ["SRC_PGDATABASE"]
destPgHost = os.environ["DEST_PGHOST"]
destPgUser = os.environ["DEST_PGUSER"]
destPgPassword = os.environ["DEST_PGPASSWORD"]
destPgDatabase = os.environ["DEST_PGDATABASE"]

try:
    srcConn = psycopg2.connect(
        host=srcPgHost,
        dbname=srcPgDatabase,
        user=srcPgUser,
        password=srcPgPassword,
        sslmode='require'
    )
    srcConn.autocommit = False

    destConn = psycopg2.connect(
        host=destPgHost,
        dbname=destPgDatabase,
        user=destPgUser,
        password=destPgPassword,
        sslmode='require'
    )
    destConn.autocommit = False

    with srcConn.cursor() as srcCur, destConn.cursor() as destCur:
        srcCur.execute("select time, location, status, temperature, category from room_climate_measurement_t where category = 'heating' and time > '2023-12-19 05:20:00' order by time")
        for srcObj in srcCur:
            timestamp = srcObj[0]
            location = srcObj[1]
            status = srcObj[2]
            temperature = srcObj[3]
            category = srcObj[4]

            logger.info(f"{timestamp=}, {location=}, {status=}, {temperature=}, {category=}")

            destTime = timestamp

            match category:
                case 'heating':
                    destApplication = 'Temperature Heating'
                case 'Outdoor':
                    destApplication = 'Temperature Wago'
                case 'Device':
                    destApplication = 'Temperature Wago'
                case 'Indoor':
                    destApplication = 'Temperature Multisensor' if location != 'Anna-Koeln-2' else 'Temperature Shelly Plus HT'
                case 'Special':
                    destApplication = 'Temperature Multisensor'

            destDevice = location
            destAttributes = '{"ApplicationId":"temperature-imported", "Status":"' + status + '","Location":"' + location + '","Category":"' + category + '","Hint": "Migrated"}'
            destValues = '{"Value": {"unit": "°C", "label": "", "value": "' + str(temperature) + '", "variable": ""}}'

            logger.info(f"{destTime=}, {destApplication=}, {destDevice=}, {destAttributes=}, {destValues=}")

            try:
                destCur.execute("insert into measurements (time, application, device, attributes, values) values(%s, %s, %s, %s, %s)",
                    (destTime, destApplication, destDevice, destAttributes, destValues))
                destConn.commit()
            except Exception as e:
                destConn.rollback()
                logger.error(f"Error {e} when inserted time {destTime}")

finally:
    if srcConn:
        srcConn.close()

    if destConn:
        destConn.close()
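Note that the category-to-application mapping above uses the match statement, which requires Python 3.10 or newer. A rough equivalent for older interpreters, sketched here only for clarity and mirroring the cases in the script, could look like this:

CATEGORY_TO_APPLICATION = {
    'heating': 'Temperature Heating',
    'Outdoor': 'Temperature Wago',
    'Device': 'Temperature Wago',
    'Special': 'Temperature Multisensor',
}

def map_application(category, location):
    # 'Indoor' depends on the location, everything else is a fixed lookup.
    if category == 'Indoor':
        return 'Temperature Multisensor' if location != 'Anna-Koeln-2' else 'Temperature Shelly Plus HT'
    return CATEGORY_TO_APPLICATION.get(category)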
queries/berresheim.sql (new file, 8 lines)
@@ -0,0 +1,8 @@
create or replace view level_v as
    select time,
        cast(values->'CorrectedDistance'->>'value' as float) as level,
        cast(values->'Battery'->>'value' as float) as battery,
        attributes->>'Status' as status,
        device
    from measurements
    where application = 'de-hottis-level-monitoring';
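The level_v view can then be read like an ordinary table. A minimal usage sketch, not part of the commit; the database name is taken from the migration diff above and the column names from the view, while the connection details are otherwise simplified:

import psycopg2

conn = psycopg2.connect(database="udi-berresheim")
with conn.cursor() as cur:
    # Fetch the ten most recent level readings exposed by the view.
    cur.execute("select time, level, battery, status, device from level_v order by time desc limit 10")
    for time_, level, battery, status, device in cur:
        print(time_, level, battery, status, device)
conn.close()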
@@ -45,3 +45,34 @@ create or replace view temperature_v as
    from measurements
    where application in ('Temperature Multisensor', 'Temperature Shelly Plus HT');

+create or replace view temperature2_v as
+    select time,
+        cast(values->'Value'->>'value' as float) as temperature,
+        device
+    from measurements
+    where application = 'Temperature Wago';
+
+create or replace view humidity_v as
+    select time,
+        cast(values->'Value'->>'value' as float) as humidity,
+        device
+    from measurements
+    where application in ('Humidity Multisensor');
+
+create or replace view soil_v as
+    select time,
+        cast(values->'Water'->>'value' as float) as water,
+        cast(values->'Conductance'->>'value' as float) as conductance,
+        cast(values->'Temperature'->>'value' as float) as temperature,
+        device
+    from measurements
+    where application = 'de-hottis-app01' and attributes->>'DeviceType' = 'dragino-lse01';
+
+create or replace view co2_v as
+    select time,
+        cast(values->'CO2concentration'->>'value' as float) as co2concentration,
+        cast(values->'Humidity'->>'value' as float) as humidity,
+        cast(values->'Temperature'->>'value' as float) as temperature,
+        device
+    from measurements
+    where application = 'de-hottis-app01' and attributes->>'DeviceType' = 'hottis-scd30';
queries/old-daily-temperature-query.sql (new file, 11 lines)
@@ -0,0 +1,11 @@
select
    extract('day' from time)::varchar || '.' || extract('month' from time)::varchar || '.' || extract('year' from time)::varchar as day,
    avg(temperature)::numeric(10,0) as temperature
from room_climate_measurement_t
where
    category = 'Outdoor' and
    location = 'Outdoor' and
    extract('hour' from time) = 12 and
    time::date = now()::date
group by day
queries/old-pv-yield-query.sql (new file, 73 lines)
@@ -0,0 +1,73 @@
-- query

with
    first_day_in_year as (
        select
            date_trunc('day', min(time)) as day
        from pv_power_measurement_t
        where
            time between date_trunc('year', time) and now()
    ),
    first_value_in_year as (
        select
            time_bucket('1 day', time) as interval,
            first(exportenergyactive, time) as energy
        from pv_power_measurement_t
        where
            time between (select day from first_day_in_year) and (select day from first_day_in_year) + interval '1 day' and
            status = 'Ok'
        group by interval
    ),
    first_day_in_month as (
        select
            date_trunc('day', min(time)) as day
        from pv_power_measurement_t
        where
            time between date_trunc('month', now()) and now()
    ),
    first_value_in_month as (
        select
            time_bucket('1 day', time) as interval,
            first(exportenergyactive, time) as energy
        from pv_power_measurement_t
        where
            time between (select day from first_day_in_month) and (select day from first_day_in_month) + interval '1 day' and
            status = 'Ok'
        group by interval
    ),
    first_value_in_day as (
        select
            time_bucket('1 day', time) as interval,
            first(exportenergyactive, time) as energy
        from pv_power_measurement_t
        where time >= date_trunc('day', now())
        group by interval
    ),
    last_value as (
        select
            time_bucket('1 day', time) as interval,
            last(exportenergyactive, time) as energy
        from pv_power_measurement_t
        where
            time between date_trunc('day', now()) and date_trunc('day', now()) + interval '1 day' and
            status = 'Ok'
        group by interval
    )
select
    extract(year from (select day from first_day_in_year))::text as period_value,
    'Year' as period_name,
    round(((select energy from last_value) - (select energy from first_value_in_year))::numeric, 2) as yield
union
select
    to_char((select day from first_day_in_month), 'Month') as period_value,
    'Month' as period_name,
    round(((select energy from last_value) - (select energy from first_value_in_month))::numeric, 2) as yield
union
select
    now()::date::text as period_value,
    'Day' as period_name,
    round(((select energy from last_value) - (select energy from first_value_in_day))::numeric, 2) as yield;

-- output format
-- wn@atuin:~/Workspace/go-workspace/src/universal-data-ingest [main ≡ +0 ~1 -0 !]$ mosquitto_sub -h 172.23.1.102 -v -t IoT/PV/Yields
-- IoT/PV/Yields {"Month":"1.43","Year":"285.39","Day":"0.00"}
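The yield figures produced by this query are plain differences of a cumulative energy counter: the most recent exportenergyactive reading minus the first reading of the day, month, or year. The same arithmetic in Python, as an illustration only, where samples is assumed to be a time-ordered list of (timestamp, exportenergyactive) tuples:

def period_yield(samples, period_start):
    """Yield for a period = last counter reading minus the first reading at or after period_start."""
    in_period = [energy for ts, energy in samples if ts >= period_start]
    if not in_period:
        return 0.0
    return round(in_period[-1] - in_period[0], 2)

# Example with made-up counter values:
samples = [("2024-01-01", 100.0), ("2024-01-15", 180.5), ("2024-01-31", 285.39)]
print(period_yield(samples, "2024-01-01"))  # 185.39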
@@ -3,13 +3,7 @@ create or replace view power_v as
    cast(values->'ActivePowerL1'->>'value' as float) as power_l1,
    cast(values->'ActivePowerL2'->>'value' as float) as power_l2,
    cast(values->'ActivePowerL3'->>'value' as float) as power_l3,
    device
from measurements
where application = 'com-passavant-geiger-poc' and
    attributes->>'FPort' = '1';

create or replace view power_factor_v as
    select time,
        cast(values->'ActivePowerL123'->>'value' as float) as power_total,
        cast(values->'PowerfactorL1'->>'value' as float) as factor_l1,
        cast(values->'PowerfactorL2'->>'value' as float) as factor_l2,
        cast(values->'PowerfactorL3'->>'value' as float) as factor_l3,
@@ -2,9 +2,13 @@ if [ "$1" = "" ]; then
  echo "set namespace as argument"
fi
N=$1
+if [ "$2" = "" ]; then
+  echo "set instance as argument"
+fi
+I=$2
PGHOST=`kubectl get services traefik -n system -o jsonpath="{.status.loadBalancer.ingress[0].ip}"`
-PGPASSWORD=`kubectl get secrets udi-db-cred -n $N -o jsonpath="{.data.PGPASSWORD}" | base64 --decode`
-PGUSER=`kubectl get secrets udi-db-cred -n $N -o jsonpath="{.data.PGUSER}" | base64 --decode`
-PGSSLMODE=`kubectl get secrets udi-db-cred -n $N -o jsonpath="{.data.PGSSLMODE}" | base64 --decode`
-PGDATABASE=`kubectl get secrets udi-db-cred -n $N -o jsonpath="{.data.PGDATABASE}" | base64 --decode`
+PGPASSWORD=`kubectl get secrets $I-udi-db-cred -n $N -o jsonpath="{.data.PGPASSWORD}" | base64 --decode`
+PGUSER=`kubectl get secrets $I-udi-db-cred -n $N -o jsonpath="{.data.PGUSER}" | base64 --decode`
+PGSSLMODE=`kubectl get secrets $I-udi-db-cred -n $N -o jsonpath="{.data.PGSSLMODE}" | base64 --decode`
+PGDATABASE=`kubectl get secrets $I-udi-db-cred -n $N -o jsonpath="{.data.PGDATABASE}" | base64 --decode`
export PGUSER PGHOST PGPASSWORD PGSSLMODE PGDATABASE
@@ -16,6 +16,7 @@ import "udi/handlers/mbgw3"
import "udi/handlers/sver"
import "udi/handlers/svej"
import "udi/handlers/dt1t"
+import "udi/handlers/locative"


var handlerMap map[string]handler.Handler = make(map[string]handler.Handler)
@@ -44,6 +45,8 @@ func InitDispatcher() {
            factory = svej.New
        case "DT1T":
            factory = dt1t.New
+        case "Locative":
+            factory = locative.New
        default:
            factory = nil
            log.Printf("No handler %s found, ignore mapping", mapping.Handler)
src/udi/handlers/locative/locative.go (new file, 73 lines)
@@ -0,0 +1,73 @@
package locative

import (
    "reflect"
    "time"
    "log"
    "encoding/json"
    "udi/config"
    "udi/handlers/handler"
    "udi/database"
)


type LocativeHandler struct {
    handler.CommonHandler
    dbh *database.DatabaseHandle
}

type locativeEvent struct {
    Trigger string `json:"trigger"`
    Device string `json:"device"`
    Location string `json:"location"`
    Latitude string `json:"latitude"`
    Longitude string `json:"longitude"`
    Person string `json:"person"`
    Timestamp string `json:"timestamp"`
}

func New(id string, config config.HandlerConfigT) handler.Handler {
    t := &LocativeHandler {
    }
    t.Id = id
    t.dbh = database.NewDatabaseHandle()
    return t
}


func (self *LocativeHandler) Handle(message handler.MessageT) {
    log.Printf("Handler Locative %d processing %s -> %s", self.Id, message.Topic, message.Payload)

    var locativeEvent locativeEvent
    err := json.Unmarshal([]byte(message.Payload), &locativeEvent)
    if err != nil {
        self.Lost("Unable to parse payload into locativeEvent struct", err, message)
        return
    }

    variables := make(map[string]database.VariableType)
    locativeEventStructValue := reflect.ValueOf(locativeEvent)
    for i := 0; i < locativeEventStructValue.NumField(); i++ {
        field := locativeEventStructValue.Type().Field(i)
        fieldValue := locativeEventStructValue.Field(i)
        v := database.VariableType {
            Label: "",
            Variable: field.Name,
            Unit: "",
            Value: fieldValue.Interface(),
        }
        variables[field.Name] = v
    }

    measurement := database.Measurement {
        Time: time.Now(),
        Application: "Locative",
        Device: locativeEvent.Person,
        Values: variables,
    }

    self.dbh.StoreMeasurement(&measurement)
    self.S()
}
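A payload the new Locative handler accepts has to carry the seven string fields declared in the locativeEvent struct. The snippet below is only an illustration; the field values and the topic are made up and not part of the commit, only the key names come from the struct's JSON tags.

import json

# Hypothetical Locative geofence event; keys mirror the struct's JSON tags.
event = {
    "trigger": "enter",
    "device": "example-phone",
    "location": "home",
    "latitude": "50.0",
    "longitude": "7.0",
    "person": "alice",
    "timestamp": "1703000000",
}
payload = json.dumps(event)
print(payload)  # publish this string on whatever topic is mapped to the Locative handler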
@@ -2,7 +2,7 @@ package draginoLdds75

import (
    "fmt"
-    "log"
+    // "log"
    "strings"
    "strconv"
    "encoding/json"
@@ -26,7 +26,7 @@ type message struct {
    TempC_DS18B20 string `json:"TempC_DS18B20"`
}

-func Parse(fPort int, decodedPayload []byte, _ string, variables *map[string]database.VariableType, device *database.Device) error {
+func Parse(fPort int, decodedPayload []byte, _ string, variables *map[string]database.VariableType, attributes *map[string]interface{}, device *database.Device) error {
    if fPort != 2 {
        return fmt.Errorf("Unexpected fPort %d", fPort)
    }
@@ -55,11 +55,20 @@ func Parse(fPort int, decodedPayload []byte, _ string, variables *map[string]dat
        Unit: "mm",
        Value: distance,
    }

+    if distance == 20 {
+        (*attributes)["Status"] = "invalid value"
+    } else if distance == 0 {
+        (*attributes)["Status"] = "no sensor detected"
+    } else {
+        (*attributes)["Status"] = "Ok"
+    }
+
    groundLevelI, exists := device.Attributes["GroundLevel"]
    groundLevelS, ok := groundLevelI.(string)
    groundLevel, err3 := strconv.Atoi(groundLevelS)
    if exists && err3 == nil && ok {
-        log.Println("add corrected distance")
+        //log.Println("add corrected distance")
        correctedDistance := groundLevel - distance
        (*variables)["CorrectedDistance"] = database.VariableType {
            Label: "CorrectedDistance",
@@ -67,11 +76,11 @@ func Parse(fPort int, decodedPayload []byte, _ string, variables *map[string]dat
            Unit: "mm",
            Value: correctedDistance,
        }
-    } else {
+    } /* else {
        log.Printf("no ground level: %s %s %s", exists, err3, ok)
        log.Printf("Device: %s", device)
        log.Printf("Attributes: %s", device.Attributes)
-    }
+    } */

    return nil
}
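The distance-to-status mapping added here turns the sensor's special distance readings into a Status attribute instead of a measurement value. Restated as a tiny helper for clarity (a sketch only; the real logic lives in the Go parser above):

def ldds75_status(distance_mm):
    # Mirrors the attribute mapping in the Go parser above.
    if distance_mm == 20:
        return "invalid value"
    if distance_mm == 0:
        return "no sensor detected"
    return "Ok"

assert ldds75_status(0) == "no sensor detected"
assert ldds75_status(1234) == "Ok"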
@@ -26,7 +26,7 @@ type message struct {
    Dis2 int `json:"dis2"`
}

-func Parse(fPort int, decodedPayload []byte, _ string, variables *map[string]database.VariableType, device *database.Device) error {
+func Parse(fPort int, decodedPayload []byte, _ string, variables *map[string]database.VariableType, attributes *map[string]interface{}, device *database.Device) error {
    if fPort != 2 {
        return fmt.Errorf("Unexpected fPort %d", fPort)
    }
@@ -55,6 +55,15 @@ func Parse(fPort int, decodedPayload []byte, _ string, variables *map[string]dat
        Unit: "mm",
        Value: distance2,
    }
+
+    if distance1 == 2 {
+        (*attributes)["Status"] = "invalid value"
+    } else if distance1 == 1 {
+        (*attributes)["Status"] = "no sensor detected"
+    } else {
+        (*attributes)["Status"] = "Ok"
+    }
+
    groundLevelI, exists := device.Attributes["GroundLevel"]
    groundLevelS, ok := groundLevelI.(string)
    groundLevel, err3 := strconv.Atoi(groundLevelS)
@@ -24,7 +24,7 @@ type message struct {
    Water_SOIL string `json:"water_SOIL"`
}

-func Parse(fPort int, decodedPayload []byte, _ string, variables *map[string]database.VariableType, device *database.Device) error {
+func Parse(fPort int, decodedPayload []byte, _ string, variables *map[string]database.VariableType, _ *map[string]interface{}, _ *database.Device) error {
    if fPort != 2 {
        return fmt.Errorf("Unexpected fPort %d", fPort)
    }
@@ -177,7 +177,7 @@ type emuMessage1 struct {



-func Parse(fPort int, decodedPayload []byte, _ string, variables *map[string]database.VariableType, _ *database.Device) error {
+func Parse(fPort int, decodedPayload []byte, _ string, variables *map[string]database.VariableType, _ *map[string]interface{}, _ *database.Device) error {
    //log.Printf("Parse input: %d, %s", fPort, decodedPayload)
    switch fPort {
    case 1:
src/udi/handlers/ttn/models/hottisScd30/hottisScd30.go (new file, 61 lines)
@@ -0,0 +1,61 @@
package hottisScd30

import (
    //"log"
    "fmt"
    "bytes"
    "encoding/base64"
    "encoding/binary"
    "udi/database"
)

type hottisScd30Values struct {
    Status uint8
    CO2Conc int32
    Temp int32
    Hum int32
}


func Parse(fPort int, _ []byte, frmPayload string, variables *map[string]database.VariableType, attributes *map[string]interface{}, _ *database.Device) error {
    if fPort != 2 {
        return fmt.Errorf("Unexpected fPort %d", fPort)
    }

    b, err := base64.StdEncoding.DecodeString(frmPayload)
    if err != nil {
        return fmt.Errorf("Unable to base64-decode payload: %v", err)
    }

    var values hottisScd30Values
    err = binary.Read(bytes.NewReader(b), binary.LittleEndian, &values)
    if err != nil {
        return fmt.Errorf("Unable to cast into struct: %v", err)
    }
    var co2concentration float32 = float32(values.CO2Conc) / 100;
    var temperature float32 = float32(values.Temp) / 100;
    var humidity float32 = float32(values.Hum) / 100;
    // log.Printf("CO2: %f, Temp: %f, Hum: %f, Status: %d", co2concentration, temperature, humidity, values.Status)

    (*variables)["CO2concentration"] = database.VariableType {
        Label: "CO2concentration",
        Variable: "Concentration",
        Unit: "ppm",
        Value: co2concentration,
    }
    (*variables)["Temperature"] = database.VariableType {
        Label: "Temperature",
        Variable: "Temperature",
        Unit: "°C",
        Value: temperature,
    }
    (*variables)["Humidity"] = database.VariableType {
        Label: "Humidity",
        Variable: "Humidity",
        Unit: "%",
        Value: humidity,
    }

    (*attributes)["Status"] = values.Status
    return nil
}
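The parser above expects the base64 frmPayload to decode to 13 little-endian bytes: a uint8 status followed by three int32 values, each scaled by 100. A small Python sketch of that wire format (the values are made up for illustration and are not taken from the commit):

import base64
import struct

# Encode: status byte plus CO2, temperature and humidity as int32 * 100, little-endian.
status = 0
co2_ppm, temp_c, hum_pct = 415.0, 21.5, 40.0
raw = struct.pack("<Biii", status, int(co2_ppm * 100), int(temp_c * 100), int(hum_pct * 100))
frm_payload = base64.b64encode(raw).decode()

# Decode: mirrors binary.Read(..., binary.LittleEndian, &values) in the Go parser.
s, co2, temp, hum = struct.unpack("<Biii", base64.b64decode(frm_payload))
print(s, co2 / 100, temp / 100, hum / 100)  # 0 415.0 21.5 40.0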
@@ -3,15 +3,23 @@ package rawPayloadPrinter
import (
    "log"
    "fmt"
    "encoding/base64"
    "encoding/hex"
    "udi/database"
)


-func Parse(fPort int, _ []byte, frmPayload string, variables *map[string]database.VariableType, device *database.Device) error {
+func Parse(fPort int, _ []byte, frmPayload string, variables *map[string]database.VariableType, _ *map[string]interface{}, _ *database.Device) error {
    if fPort != 2 {
        return fmt.Errorf("Unexpected fPort %d", fPort)
    }

    log.Printf("frmPayload: %s", frmPayload)
    bytes, err := base64.StdEncoding.DecodeString(frmPayload)
    if err != nil {
        return fmt.Errorf("Unable to base64-decode payload: %v", err)
    }
    hexString := hex.EncodeToString(bytes)

    log.Printf("Payload: %s", hexString)
    return nil
}
@@ -12,6 +12,7 @@ import (
    "udi/handlers/ttn/models/draginoLmds200"
    "udi/handlers/ttn/models/draginoLse01"
    "udi/handlers/ttn/models/rawPayloadPrinter"
+    "udi/handlers/ttn/models/hottisScd30"
    "udi/database"
)

@@ -134,7 +135,7 @@ func (self *TTNHandler) Handle(message handler.MessageT) {

    //log.Printf("DeviceLabel: %s, DeviceType: %s", device.Label, device.DeviceType.ModelIdentifier)

-    var parser func(int, []byte, string, *map[string]database.VariableType, *database.Device) error
+    var parser func(int, []byte, string, *map[string]database.VariableType, *map[string]interface{}, *database.Device) error
    switch device.DeviceType.ModelIdentifier {
    case "emu-prof-ii-lora-cfg1":
        parser = emuProfIILoRaCfg1.Parse
@@ -146,6 +147,8 @@ func (self *TTNHandler) Handle(message handler.MessageT) {
        parser = draginoLse01.Parse
    case "raw-payload-printer":
        parser = rawPayloadPrinter.Parse
+    case "hottis-scd30":
+        parser = hottisScd30.Parse
    default:
        self.Lost(fmt.Sprintf("No parser found for %s", device.DeviceType.ModelIdentifier), nil, message)
        return
@@ -156,6 +159,7 @@ func (self *TTNHandler) Handle(message handler.MessageT) {
        uplinkMessage.UplinkMessage.DecodedPayload.Payload,
        uplinkMessage.UplinkMessage.FrmPayload,
        &(measurement.Values),
+        &(measurement.Attributes),
        device)
    if err3 != nil {
        self.Lost("Model parser failed", err3, message)