Resolve events are being filtered for an unknown reason

I’m trying to figure out why I don’t get all the “Resolve” emails I’m expecting to get.
This is my only filter:

```json
{
  "filters": {
    "basic_occurrences": {
      "attributes": {
        "occurrences": "eval: value == :::check.occurrences|1::: || ':::action:::' == 'resolve' && value >= :::check.occurrences|1:::"
      },
      "negate": false
    }
  }
}
```

The idea is to send just one alert after the check has failed the number of times specified in check.occurrences (falling back to 1), and to send a recovery **only if the fail alert was sent** (this is why I check value >= :::check.occurrences|1:::).
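To make the intended logic explicit, here is roughly how I read the expression once the tokens are substituted (just a sketch in plain Ruby, not what Sensu runs; I'm assuming `value` is the event's occurrences count and that `&&` binds tighter than `||`):

```ruby
# Hypothetical stand-in for the substituted "eval:" expression.
# `value`             -> the event's occurrences count
# `check_occurrences` -> :::check.occurrences|1:::
# `action`            -> :::action:::
def pass_filter?(value, check_occurrences, action)
  # Ruby precedence: && binds tighter than ||, so this reads as
  # (value == N) || (action == 'resolve' && value >= N)
  value == check_occurrences || action == 'resolve' && value >= check_occurrences
end

pass_filter?(3, 3, 'create')   # => true  (alert on the Nth failure)
pass_filter?(5, 3, 'create')   # => false (no repeat alerts)
pass_filter?(4, 3, 'resolve')  # => true  (recovery after an alert was sent)
pass_filter?(1, 3, 'resolve')  # => false (recovery suppressed, no alert was sent)
```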

But most of the resolve alerts are filtered for some reason. See this event, for example:

```json
{
  "timestamp": "2016-10-12T17:24:44.782405+0000",
  "level": "info",
  "message": "event was filtered",
  "handler": {
    "command": "/opt/sensu/embedded/bin/handler-mailer.rb",
    "severities": [
      "ok",
      "critical",
      "unknown"
    ],
    "filters": [
      "basic_occurrences"
    ],
    "type": "pipe",
    "name": "mailer"
  },
  "event": {
    "client": {
      "name": "just.a.host.net",
      "address": "##.##.##.##",
      "subscriptions": [
        "just.a.host.net",
        "client:just.a.host.net"
      ],
      "socket": {
        "bind": "127.0.0.1",
        "port": 3030
      },
      "safe_mode": false,
      "keepalive": {
        "enable_deprecated_filtering": false,
        "enable_deprecated_occurrence_filtering": false,
        "thresholds": {
          "critical": 180,
          "warning": 120
        },
        "occurrences": 3,
        "env": "production",
        "handlers": [
          "slack",
          "mailer",
          "logstash"
        ],
        "hostgroup": "general_server/bs"
      },
      "version": "0.26.1",
      "timestamp": 1476293073
    },
    "check": {
      "enable_deprecated_filtering": false,
      "timeout": 290,
      "standalone": true,
      "puppet_module": "icinga",
      "env": "production",
      "command": "/tmp/1.sh  60 300",
      "handlers": [
        "slack",
        "mailer",
        "logstash"
      ],
      "enable_deprecated_occurrence_filtering": false,
      "interval": 300,
      "refresh": 7776000,
      "high_flap_threshold": 60,
      "hostgroup": "general_server/bs",
      "description": "delay in minutes",
      "puppet_caller_module": "vrt",
      "occurrences": 3,
      "low_flap_threshold": 20,
      "type": "standard",
      "alert_on_individual_failure": true,
      "aggregate": false,
      "name": "loading_delay",
      "issued": 1476293084,
      "executed": 1476293084,
      "duration": 0.499,
      "output": "Not found any loading delay, current delay for ####  is 4 minutes, for #### is 264 minutes\n",
      "status": 0,
      "history": [
        "0", "0", "0", "0", "2", "2", "2", "2", "2", "2",
        "0", "0", "0", "0", "0", "0", "2", "2", "2", "2", "0"
      ],
      "total_state_change": 21
    },
    "occurrences": 4,
    "occurrences_watermark": 4,
    "action": "resolve",
    "timestamp": 1476293084,
    "id": "b6e7debf-76c5-41ad-bac5-38e8a96794ff",
    "last_state_change": 1476293084,
    "last_ok": 1476293084,
    "silenced": false,
    "silenced_by": []
  }
}
```
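If I substitute this event's values into the expression by hand (again just a sanity-check sketch, not what Sensu actually evaluates), I'd expect it to match:

```ruby
# Values taken from the filtered event above
value             = 4          # event occurrences
check_occurrences = 3          # :::check.occurrences|1:::
action            = 'resolve'  # :::action:::

value == check_occurrences || action == 'resolve' && value >= check_occurrences
# => true, so with "negate": false I'd expect this event NOT to be filtered
```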

Does anyone have an idea what I'm missing?

So…

The filter does work, provided you restart the sensu-server service after making changes to it :slight_smile:

Opened a PR for the Puppet module: https://github.com/sensu/sensu-puppet/pull/562

I’m considering using the sensu-extensions-occurrences extension (https://github.com/sensu-extensions/sensu-extensions-occurrences) instead; it uses more or less the same logic, but it’s maintained, documented, and generally nicer.


Hi,

Any update regarding this issue?

I’m not able to get notifications on resolve while using this filter:

"occurrences": "eval: value == 1 || ':action:' == 'resolve'"

Thanks

Tal

