repeated alarm message suppression

Wolfgang Hottgenroth 2017-09-05 22:52:38 +02:00
parent 7963bb98fe
commit 0c6ebdc76e
6 changed files with 47 additions and 18 deletions

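In short: MissingEventProcessor now keeps per-client alarm state. The first time a client's events are overdue it sends an "Alarm" mail; while the client stays missing, further mails are suppressed and only a "Repeated Alarm" is sent at most once per alarmRepeatPeriod; once events arrive again, a single "Release" mail goes out and the counters are reset. The TypeScript sketch below distills that logic from the diff; the field and config names come from the commit, but the free-standing functions and the sendAlarmMail stub are simplified stand-ins, not the actual class.

// Simplified sketch of the suppression logic; not the actual MissingEventProcessor
// class -- see the diff below for the real code.

function sendAlarmMail(subject: string, message: string): void {
    // Stand-in for log.sendAlarmMail(subject, message) introduced by this commit.
    console.log(`${subject}: ${message}`)
}

interface ClientEntry {
    lastEvent: number            // timestamp of the last event (ms)
    avgDelay: number             // average delay between events (ms)
    alarmState: boolean          // true while the client counts as missing
    lastAlarmMessage: number     // timestamp of the last alarm mail (ms)
    alarmMessageCounter: number  // number of repeated alarm mails sent so far
}

// Runs every checkPeriod seconds for each known client.
function check(key: string, value: ClientEntry, alarmRepeatPeriod: number): void {
    const currentTime = new Date().getTime()
    const elapsedTime = currentTime - value.lastEvent

    // Already alarming: repeat the mail at most once per alarmRepeatPeriod seconds.
    if (value.alarmState && (value.lastAlarmMessage + alarmRepeatPeriod * 1000) < currentTime) {
        value.lastAlarmMessage = currentTime
        value.alarmMessageCounter += 1
        sendAlarmMail('Repeated Alarm', `Missing Event Detected: ${key}, repeated: ${value.alarmMessageCounter}`)
    }

    // Not yet alarming: raise the first alarm once events are overdue (3x the average delay).
    if (!value.alarmState && value.avgDelay != 0 && elapsedTime > value.avgDelay * 3) {
        value.alarmState = true
        value.lastAlarmMessage = currentTime
        sendAlarmMail('Alarm', `Missing Event Detected: ${key}`)
    }
}

// Runs when an event from the client arrives again.
function eventReceived(client: string, value: ClientEntry): void {
    if (value.alarmState) {
        sendAlarmMail('Release', `Event received again: ${client}`)
        value.alarmMessageCounter = 0
    }
    value.alarmState = false
}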

@@ -5,6 +5,8 @@
"brokerCa": "/home/wn/server-ca.crt",
"disableDatabaseAccess": true,
"mongoDbUrl": "mongodb://localhost/hottis",
"checkPeriod": 5,
"alarmRepeatPeriod": 15,
"smtpHost": "localhost",
"smtpPort": 25,
"smtpSender": "dispatcher@hottis.de",

dist/log.js vendored

@@ -30,7 +30,7 @@ function setLevel(value) {
}
}
exports.setLevel = setLevel;
function sendAlarmMail(message) {
function sendAlarmMail(subject, message) {
let transport = nodemailer.createTransport({
host: config.dict.smtpHost,
port: config.dict.smtpPort,
@@ -42,12 +42,12 @@ function sendAlarmMail(message) {
let mail = {
from: config.dict.smtpSender,
to: config.dict.smtpReceiver,
subject: "Alarm from Dispatcher",
subject: subject,
text: message
};
transport.sendMail(mail)
.then((v) => {
info(`Alarm mail sent, ${message}, ${v.response}`);
info(`Mail sent, ${subject}, ${message}, ${v.response}`);
})
.catch((reason) => {
error(`Failure when sending alarm mail: ${message}, ${reason}`);
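
sendAlarmMail now takes the mail subject as its first argument instead of the fixed "Alarm from Dispatcher" subject; callers pass it explicitly, e.g. (as the detector below does):

    log.sendAlarmMail('Release', `Event received again: ${client}`)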

dist/main.js vendored

@@ -9,7 +9,7 @@ const MissingEventDetector = require("./missingeventdetector");
log.info("Dispatcher starting");
config.readConfig();
let dispatcher = new mqtt.MqttDispatcher(config.dict.brokerUrl, config.dict.brokerUser, config.dict.brokerPass, config.dict.brokerCa);
const ESP_THERM_TOPIC = 'IoT/espThermometer3/#';
const ESP_THERM_TOPIC = 'IoT/espThermometer2/#';
dispatcher.register(ESP_THERM_TOPIC, 'toJson', EspThermToJson.espThermToJson);
let missingeventdetector = new MissingEventDetector.MissingEventDetector();
dispatcher.register(ESP_THERM_TOPIC, 'MissingEventDetector', missingeventdetector);


@@ -3,7 +3,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
const log = require("./log");
const CallChain = require("./callchain");
const Processor = require("./processor");
const CHECK_PERIOD = 60; // seconds
const config = require("./config");
class ClientEntry {
}
class MissingEventProcessor extends Processor.AProcessor {
@@ -15,11 +15,18 @@ class MissingEventProcessor extends Processor.AProcessor {
let currentTime = new Date().getTime();
let elapsedTime = currentTime - value.lastEvent;
log.info(`MissingEventProcessor: Checking ${key}, elapsed: ${elapsedTime / 1000}, avg. delay: ${value.avgDelay / 1000}`);
if ((value.avgDelay != 0) && (elapsedTime > (value.avgDelay * 3))) {
log.sendAlarmMail(`Missing Event Detected: ${key}, elapsed: ${elapsedTime / 1000}, avg. delay: ${value.avgDelay / 1000}`);
if (value.alarmState && ((value.lastAlarmMessage + config.dict.alarmRepeatPeriod * 1000) < currentTime)) {
value.lastAlarmMessage = currentTime;
value.alarmMessageCounter += 1;
log.sendAlarmMail('Repeated Alarm', `Missing Event Detected: ${key}, repeated: ${value.alarmMessageCounter}, elapsed: ${elapsedTime / 1000}, avg. delay: ${value.avgDelay / 1000}`);
}
if ((!value.alarmState) && ((value.avgDelay != 0) && (elapsedTime > (value.avgDelay * 3)))) {
log.sendAlarmMail('Alarm', `Missing Event Detected: ${key}, elapsed: ${elapsedTime / 1000}, avg. delay: ${value.avgDelay / 1000}`);
value.alarmState = true;
value.lastAlarmMessage = currentTime;
}
});
}, CHECK_PERIOD * 1000);
}, config.dict.checkPeriod * 1000);
}
process(message) {
let client = message.metadata.client;
@@ -35,6 +42,11 @@ class MissingEventProcessor extends Processor.AProcessor {
clientEntry.delaySum += clientEntry.delay;
clientEntry.avgDelay = clientEntry.delaySum / clientEntry.count;
}
if (clientEntry.alarmState) {
log.sendAlarmMail('Release', `Event received again: ${client}`);
clientEntry.alarmMessageCounter = 0;
}
clientEntry.alarmState = false;
this.clientMap.set(client, clientEntry);
log.info(`MissingEventProcessor: Entry for ${client} updated`);
}
@@ -46,6 +58,8 @@ class MissingEventProcessor extends Processor.AProcessor {
clientEntry.count = -1;
clientEntry.delaySum = 0;
clientEntry.avgDelay = 0;
clientEntry.alarmState = false;
clientEntry.alarmMessageCounter = 0;
this.clientMap.set(client, clientEntry);
log.info(`MissingEventProcessor: Entry for ${client} inserted`);
}


@@ -26,7 +26,7 @@ export function setLevel(value: string): void {
}
}
export function sendAlarmMail(message : string): void {
export function sendAlarmMail(subject : string, message : string): void {
let transport = nodemailer.createTransport({
host: config.dict.smtpHost,
port: config.dict.smtpPort,
@@ -39,13 +39,13 @@ export function sendAlarmMail(message : string): void {
let mail : nodemailer.SendMailOptions = {
from: config.dict.smtpSender,
to: config.dict.smtpReceiver,
subject: "Alarm from Dispatcher",
subject: subject,
text: message
};
transport.sendMail(mail)
.then((v : nodemailer.SentMessageInfo) => {
info(`Alarm mail sent, ${message}, ${v.response}`)
info(`Mail sent, ${subject}, ${message}, ${v.response}`)
})
.catch((reason : any) => {
error(`Failure when sending alarm mail: ${message}, ${reason}`)


@@ -4,9 +4,6 @@ import * as Processor from './processor'
import * as config from './config'
const CHECK_PERIOD : number = 60 // seconds
class ClientEntry {
client : string
lastEvent : number
@@ -14,6 +11,9 @@ class ClientEntry {
count : number
delaySum : number
avgDelay : number
alarmState : boolean
lastAlarmMessage : number
alarmMessageCounter: number
}
class MissingEventProcessor extends Processor.AProcessor {
@@ -30,12 +30,18 @@ class MissingEventProcessor extends Processor.AProcessor {
let currentTime : number = new Date().getTime()
let elapsedTime : number = currentTime - value.lastEvent
log.info(`MissingEventProcessor: Checking ${key}, elapsed: ${elapsedTime / 1000}, avg. delay: ${value.avgDelay / 1000}`)
if ((value.avgDelay != 0) && (elapsedTime > (value.avgDelay * 3))) {
log.sendAlarmMail(`Missing Event Detected: ${key}, elapsed: ${elapsedTime / 1000}, avg. delay: ${value.avgDelay / 1000}`)
if (value.alarmState && ((value.lastAlarmMessage + config.dict.alarmRepeatPeriod * 1000) < currentTime)) {
value.lastAlarmMessage = currentTime
value.alarmMessageCounter += 1
log.sendAlarmMail('Repeated Alarm', `Missing Event Detected: ${key}, repeated: ${value.alarmMessageCounter}, elapsed: ${elapsedTime / 1000}, avg. delay: ${value.avgDelay / 1000}`)
}
if ((! value.alarmState) && ((value.avgDelay != 0) && (elapsedTime > (value.avgDelay * 3)))) {
log.sendAlarmMail('Alarm', `Missing Event Detected: ${key}, elapsed: ${elapsedTime / 1000}, avg. delay: ${value.avgDelay / 1000}`)
value.alarmState = true
value.lastAlarmMessage = currentTime
}
})
}, CHECK_PERIOD * 1000)
}, config.dict.checkPeriod * 1000)
}
protected process(message : any) : void {
@@ -52,6 +58,11 @@ class MissingEventProcessor extends Processor.AProcessor {
clientEntry.delaySum += clientEntry.delay
clientEntry.avgDelay = clientEntry.delaySum / clientEntry.count
}
if (clientEntry.alarmState) {
log.sendAlarmMail('Release', `Event received again: ${client}`)
clientEntry.alarmMessageCounter = 0
}
clientEntry.alarmState = false
this.clientMap.set(client, clientEntry)
log.info(`MissingEventProcessor: Entry for ${client} updated`)
} else {
@@ -62,6 +73,8 @@ class MissingEventProcessor extends Processor.AProcessor {
clientEntry.count = -1
clientEntry.delaySum = 0
clientEntry.avgDelay = 0
clientEntry.alarmState = false
clientEntry.alarmMessageCounter = 0
this.clientMap.set(client, clientEntry)
log.info(`MissingEventProcessor: Entry for ${client} inserted`)
}