Add the ability to include last command output in the alert

Update readme to document the new templating vars
IamTheFij 2018-04-18 17:05:33 -07:00
parent 34c5ab47ff
commit 16be263093
6 changed files with 28 additions and 8 deletions


@@ -1,5 +1,5 @@
 DOCKER_TAG := minitor-dev
-OPEN_CMD := $(shell type xdg-open > /dev/null && echo 'xdg-open' || echo 'open')
+OPEN_CMD := $(shell type xdg-open &> /dev/null && echo 'xdg-open' || echo 'open')

 .PHONY: default
 default: test


@@ -42,7 +42,16 @@ The Docker image uses a default `config.yml` that is copied from `sample-config.yml`
 ### Configuring

-In this repo, you can explore the `sample-config.yml` file for an example, but the general structure is as follows. It should be noted that environment variable interpolation happens on load of the YAML file. Also, when alerts are executed, they will be passed through Python's format function with arguments for some attributes of the Monitor. Currently this is limited to `{monitor_name}`.
+In this repo, you can explore the `sample-config.yml` file for an example, but the general structure is as follows. It should be noted that environment variable interpolation happens on load of the YAML file. Also, when alerts are executed, they will be passed through Python's format function with arguments for some attributes of the Monitor. The following monitor specific variables can be referenced using Python formatting syntax:
+
+|token|value|
+|---|---|
+|`{alert_count}`|Number of times this monitor has alerted|
+|`{alert_message}`|The exception message that was raised|
+|`{failure_count}`|The total number of sequential failed checks for this monitor|
+|`{last_output}`|The last returned value from the check command to either stderr or stdout|
+|`{last_success}`|The ISO datetime of the last successful check|
+|`{monitor_name}`|The name of the monitor that failed and triggered the alert|

 ## Contributing
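
The table above (from the updated README) lists the tokens available to alert commands. As a quick illustration of the mechanism, not part of this commit, the alert command string is run through Python's `str.format` with those monitor attributes as keyword arguments, and tokens the template does not reference are simply ignored. The values below are made up for the example:

    # Hypothetical monitor state, only for demonstrating the formatting step
    template = (
        '{monitor_name} has failed {failure_count} time(s); '
        'last output: {last_output}'
    )
    print(template.format(
        alert_count=2,
        alert_message='Command returned non-zero exit status 1',
        failure_count=3,
        last_output='connection refused',
        last_success='2018-04-18T16:00:00',
        monitor_name='Example Monitor',
    ))
    # Example Monitor has failed 3 time(s); last output: connection refused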


@@ -127,10 +127,11 @@ class Monitor(object):
         self.alert_after = settings.get('alert_after')
         self.alert_every = settings.get('alert_every')

+        self.alert_count = 0
         self.last_check = None
+        self.last_output = None
         self.last_success = None
         self.total_failure_count = 0
-        self.alert_count = 0

         self.logger = logging.getLogger(
             '{}({})'.format(self.__class__.__name__, self.name)
@@ -155,8 +156,10 @@
             self.command,
             shell=isinstance(self.command, str),
         )
+        output = maybe_decode(output)
         self.logger.debug(output)
         self.last_check = datetime.now()
+        self.last_output = output

         if ex is None:
             self.success()
@@ -240,9 +243,10 @@ class Alert(object):
             self._formated_command(
                 alert_count=monitor.alert_count,
                 alert_message=message,
-                monitor_name=monitor.name,
                 failure_count=monitor.total_failure_count,
+                last_output=monitor.last_output,
                 last_success=self._format_datetime(monitor.last_success),
+                monitor_name=monitor.name,
             ),
             shell=isinstance(self.command, str),
         )
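
The second hunk above passes the raw check output through `maybe_decode`, a helper that is not included in this diff. A minimal sketch of a compatible helper, assuming its only job is to turn the `bytes` returned by the check command into `str` while leaving anything without a `decode` method (such as an already-decoded string, or `None`) untouched:

    def maybe_decode(value, encoding='utf-8'):
        # bytes from check_output -> str; str or None pass through unchanged
        try:
            return value.decode(encoding)
        except AttributeError:
            return value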


@@ -25,7 +25,8 @@ class TestAlert(object):
             'echo', (
                 '{monitor_name} has failed {failure_count} time(s)!\n'
                 'We have alerted {alert_count} time(s)\n'
-                'Last success was {last_success}'
+                'Last success was {last_success}\n'
+                'Last output was: {last_output}'
             )
         ]
     }
@@ -45,13 +46,15 @@
         last_success,
         expected_success
     ):
-        monitor.total_failure_count = 1
         monitor.alert_count = 1
+        monitor.last_output = 'beep boop'
         monitor.last_success = last_success
+        monitor.total_failure_count = 1
         with patch.object(echo_alert.logger, 'error') as mock_error:
             echo_alert.alert('Exception message', monitor)
             mock_error.assert_called_once_with(
                 'Dummy Monitor has failed 1 time(s)!\n'
                 'We have alerted 1 time(s)\n'
-                'Last success was ' + expected_success
+                'Last success was ' + expected_success + '\n'
+                'Last output was: beep boop'
             )


@@ -107,15 +107,19 @@ class TestMonitor(object):
         assert monitor.should_check()

     def test_monitor_check_fail(self, monitor):
+        assert monitor.last_output is None
         with patch.object(monitor, 'failure') as mock_failure:
             monitor.command = ['ls', '--not-real']
             assert not monitor.check()
             mock_failure.assert_called_once()
+        assert monitor.last_output is not None

     def test_monitor_check_success(self, monitor):
+        assert monitor.last_output is None
         with patch.object(monitor, 'success') as mock_success:
             assert monitor.check()
             mock_success.assert_called_once()
+        assert monitor.last_output is not None

     @pytest.mark.parametrize('failure_count', [0, 1])
     def test_monitor_success(self, monitor, failure_count):


@@ -7,7 +7,7 @@ deps =
 commands =
     coverage erase
     coverage run --source=minitor/ -m pytest --capture=no -vv {posargs:tests}
-    coverage report -m --fail-under 50
+    coverage report -m --fail-under 70
     pre-commit run --all-files

 [testenv:pre-commit]