Compare commits

...

41 Commits

Author SHA1 Message Date
6408ca1e42 Remove script
Some checks reported errors
continuous-integration/drone/push Build is passing
continuous-integration/drone Build was killed
2022-04-07 14:09:16 -07:00
a6b05582fd Update curl version
Some checks failed
continuous-integration/drone/push Build is failing
2022-04-07 13:33:04 -07:00
265f9f8372 Remove pypy tests due to missing rust in Docker
Some checks failed
continuous-integration/drone/push Build is failing
2022-04-07 10:14:02 -07:00
d27bcf85dd run tests on pypy3.9
Some checks failed
continuous-integration/drone/push Build is failing
2022-04-05 21:49:25 -07:00
e41d82f9d2 Blacken
Some checks failed
continuous-integration/drone/push Build is failing
2022-04-04 20:23:15 -07:00
094c910cd4 Update supported tested python versions
Some checks failed
continuous-integration/drone/push Build is failing
2022-04-04 20:17:06 -07:00
d5d2be870a Bump bash and curl versions
All checks were successful
continuous-integration/drone/push Build is passing
2021-06-02 09:02:45 -06:00
701ad0be1b Mention minitor-go
Some checks failed
continuous-integration/drone/push Build is failing
2021-06-01 18:38:55 -06:00
8e252f3bcb Slightly reduce image size by removing pip cache
All checks were successful
continuous-integration/drone/push Build is passing
2020-01-30 10:28:30 -08:00
1852e8c439 Revert both commits that remove py-tests
All checks were successful
continuous-integration/drone/push Build is passing
Revert "Remove test dependency too"

This reverts commit db12bb5db1.

Revert "Remove py-tests and pypi to speed up docker validation"

This reverts commit 9aa77b3739.
2020-01-10 16:08:49 -08:00
db12bb5db1 Remove test dependency too
Some checks reported errors
continuous-integration/drone/push Build was killed
2020-01-10 16:07:19 -08:00
8992ac1d33 Add qemu binary download 2020-01-10 16:06:35 -08:00
9aa77b3739 Remove py-tests and pypi to speed up docker validation 2020-01-10 16:06:13 -08:00
67c02a3e6f Reduce pipelines in build to reduce parallelism and notifications 2020-01-10 16:01:48 -08:00
c9eaaaa45c Split tox step out of test pipelines 2020-01-10 15:57:08 -08:00
1251532ca6 Fix duplicate step names
Some checks failed
continuous-integration/drone/push Build is failing
2020-01-10 15:37:02 -08:00
874d4ab0aa Again move building docker latest on push to master
Some checks reported errors
continuous-integration/drone/push Build encountered an error
2020-01-10 14:55:13 -08:00
ad4a3770e7 Update build pipeline to notify on docker fail/success
All checks were successful
continuous-integration/drone/push Build is passing
2020-01-10 14:43:05 -08:00
64e6542a93 Bump version of curl
All checks were successful
continuous-integration/drone/push Build is passing
2020-01-10 14:00:18 -08:00
0208858e5e Get back to building latest docker image on pushes to master
Some checks failed
continuous-integration/drone/push Build is failing
2020-01-10 21:35:54 +00:00
580f41e60e Switch from yaml to starlark build configuration
All checks were successful
continuous-integration/drone/push Build is passing
Starlark provides more advanced generation of build pipelines.

https://docs.drone.io/starlark/overview/
2020-01-09 15:08:57 -08:00
5a7adbcff5 Drop Python 3.4 support 2020-01-06 19:15:34 -08:00
eed6262f27 Update run-metrics target to the same as run 2020-01-06 18:47:15 -08:00
0cbdff1b5d Update Dockerfiles to newer (roughly) pinned versions
Some checks failed
continuous-integration/drone/push Build is failing
2019-11-22 14:46:16 -08:00
ad0bcc2b3f Add .mypy_cache to .gitignore
Some checks failed
continuous-integration/drone/push Build is failing
2019-11-21 12:11:26 -08:00
0a70ca768f Improve apk install
Some checks failed
continuous-integration/drone/push Build is failing
Make image more stable and slim by removing cache and pinning versions
2019-10-24 12:12:03 -07:00
IamTheFij
9eef77a457 Add bash syntax highlighting to Readme
Some checks failed
continuous-integration/drone/push Build is failing
No meaningful changes otherwise. :)
2019-08-22 23:01:15 +00:00
f74479c53a Fix Makefile typo
All checks were successful
continuous-integration/drone/push Build is passing
Accidentally referenced arm instead of arm64
2019-08-05 15:53:09 -07:00
03ccef5ae5 Bump version
Some checks reported errors
continuous-integration/drone/push Build was killed
continuous-integration/drone/tag Build is passing
2019-08-02 17:35:48 -07:00
0557ab11c2 Fix docker healthcheck again
Some checks reported errors
continuous-integration/drone/push Build was killed
2019-08-02 17:35:13 -07:00
da0c8a0fa9 Bump version to fix Docker
Some checks reported errors
continuous-integration/drone/push Build was killed
continuous-integration/drone/tag Build is passing
2019-08-01 14:50:33 -07:00
e3e54c7a0f Update Makefile to make testing runs easier 2019-08-01 14:50:09 -07:00
9a8dbdbfef Roll utils and non-root stuff into main image
All checks were successful
continuous-integration/drone/push Build is passing
2019-08-01 13:59:05 -07:00
e19280151e Update Dockerfile and example Dockerfile
Done to show non-root usage and more inbuilt utilities
2019-08-01 13:54:52 -07:00
8c14c9383e Update travis build
All checks were successful
continuous-integration/drone/push Build is passing
continuous-integration/drone/tag Build is passing
2019-07-30 23:12:37 -07:00
0db1f88330 Try to fix py34 and py35 builds
Fixing this by switching to tox outside the venv

Also adding more python versions
2019-07-30 23:12:37 -07:00
cc66ab1918 Ignore virtualenv in docker build 2019-07-30 23:08:25 -07:00
2e814dea86 Simplify drone build
Some checks failed
continuous-integration/drone/push Build is failing
2019-07-30 21:34:42 -07:00
4683b3856e Allow building against more Python versions
Some checks failed
continuous-integration/drone/push Build is failing
2019-07-30 21:17:24 -07:00
e8f7ba6801 Update docker scripts to work with a proxy 2019-07-29 15:01:07 -07:00
e5687ed83e Add multi-arch build pipeline
Some checks reported errors
continuous-integration/drone/push Build was killed
2019-06-20 14:16:16 -07:00
23 changed files with 654 additions and 443 deletions


@ -6,3 +6,4 @@
**/__pycache__/
scripts/README.md
examples/
env/

201
.drone.star Normal file

@ -0,0 +1,201 @@
# Build pipelines
def main(ctx):
pipelines = []
# Run tests
pipelines += run_tests()
# Add pypi push pipeline
pipelines += push_to_pypi(ctx)
# Add docker push pipelines
pipelines += push_to_docker(ctx)
return pipelines
# Return workspace in the container
def get_workspace():
return {
"base": "/app",
"path": ".",
}
# Builds a list of all test pipelines to be executed
def run_tests():
return [{
"kind": "pipeline",
"name": "tests",
"workspace": get_workspace(),
"steps": [
tox_step("python:3.7"),
tox_step("python:3.8"),
tox_step("python:3.9"),
tox_step("python:3.10"),
tox_step("python:3"),
# tox_step("pypy:3.9", "pypy3", "pypy3"),
# tox_step("pypy:3", "pypy3", "pypy3"),
notify_step(),
],
}]
# Builds a single python test step
def tox_step(docker_tag, python_cmd="python", tox_env="py3"):
return {
"name": "test {}".format(docker_tag.replace(":", "")),
"image": docker_tag,
"environment": {
"TOXENV": tox_env,
},
"commands": [
"{} -V".format(python_cmd),
"pip install tox",
"tox",
],
}
# Builds a notify step that will notify when the previous step changes
def notify_step():
return {
"name": "notify",
"image": "drillster/drone-email",
"settings": {
"host": {
"from_secret": "SMTP_HOST",
},
"username": {
"from_secret": "SMTP_USER",
},
"password": {
"from_secret": "SMTP_PASS",
},
"from": "drone@iamthefij.com",
},
"when": {
"status": [
"changed",
"failure",
],
},
}
# Push package to pypi
def push_to_pypi(ctx):
return [{
"kind": "pipeline",
"name": "deploy to pypi",
"depends_on": ["tests"],
"workspace": get_workspace(),
"trigger": {
"event": ["tag"],
"ref": [
"refs/heads/master",
"refs/tags/v*",
],
},
"steps": [
{
"name": "push to test pypi",
"image": "python:3",
"environment": {
"TWINE_USERNAME": {
"from_secret": "PYPI_USERNAME",
},
"TWINE_PASSWORD": {
"from_secret": "TEST_PYPI_PASSWORD",
},
},
"commands": ["make upload-test"],
},
{
"name": "push to pypi",
"image": "python:3",
"environment": {
"TWINE_USERNAME": {
"from_secret": "PYPI_USERNAME",
},
"TWINE_PASSWORD": {
"from_secret": "PYPI_PASSWORD",
},
},
"commands": ["make upload"],
"when": {
"event": ["tag"],
},
},
notify_step(),
]
}]
# Build and push docker image
def push_docker_step(tag_suffix, arch, repo):
return {
"name": "push {}".format(tag_suffix),
"image": "plugins/docker",
"settings": {
"repo": "iamthefij/minitor",
"auto_tag": True,
"auto_tag_suffix": tag_suffix,
"username": {
"from_secret": "docker_username",
},
"password": {
"from_secret": "docker_password",
},
"build_args": [
"ARCH={}".format(arch),
"REPO={}".format(repo),
],
},
}
# Builds a pipeline to push to docker
def push_to_docker(ctx):
return [{
"kind": "pipeline",
"name": "push to docker",
"depends_on": ["tests"],
"workspace": get_workspace(),
"trigger": {
"event": ["tag", "push"],
"ref": [
"refs/heads/master",
"refs/tags/v*",
],
},
"steps": [
{
"name": "get qemu",
"image": "busybox",
"commands": ["sh ./get_qemu.sh x86_64 arm aarch64"],
},
push_docker_step("linux-amd64", "x86_64", "library"),
push_docker_step("linux-arm", "arm", "arm32v6"),
push_docker_step("linux-arm64", "aarch64", "arm64v8"),
{
"name": "publish manifest",
"image": "plugins/manifest",
"settings": {
"spec": "manifest.tmpl",
"auto_tag": True,
"ignore_missing": True,
"username": {
"from_secret": "docker_username",
},
"password": {
"from_secret": "docker_password",
},
}
},
notify_step(),
],
}]
# vim: ft=python
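
Since Drone's Starlark dialect here sticks to Python-compatible constructs, the pipeline list this file generates can be previewed locally. A rough sketch (illustration only, not part of the changeset), assuming `.drone.star` is in the working directory and noting that `ctx` is never referenced by the functions above:

```python
# Preview the pipelines .drone.star would generate (sketch).
import json

namespace = {}
with open(".drone.star") as f:
    exec(f.read(), namespace)  # defines main(), run_tests(), etc.

pipelines = namespace["main"](None)  # ctx is unused by these pipelines
print(json.dumps(pipelines, indent=2))  # "tests", "deploy to pypi", "push to docker"
```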


@ -1,160 +0,0 @@
kind: pipeline
name: python-latest
workspace:
base: /app
path: .
steps:
- name: test
image: python:3
commands:
- make test-env test
- name: notify
image: drillster/drone-email
settings:
host:
from_secret: SMTP_HOST
username:
from_secret: SMTP_USER
password:
from_secret: SMTP_PASS
from: drone@iamthefij.com
when:
status: [ changed, failure ]
---
kind: pipeline
name: python-3.4
workspace:
base: /app
path: .
steps:
- name: test
image: python:3.4
commands:
- make test-env test
- name: notify
image: drillster/drone-email
settings:
host:
from_secret: SMTP_HOST
username:
from_secret: SMTP_USER
password:
from_secret: SMTP_PASS
from: drone@iamthefij.com
when:
status: [ changed, failure ]
---
kind: pipeline
name: python-3.6
workspace:
base: /app
path: .
steps:
- name: test
image: python:3.6
commands:
- make test-env test
- name: notify
image: drillster/drone-email
settings:
host:
from_secret: SMTP_HOST
username:
from_secret: SMTP_USER
password:
from_secret: SMTP_PASS
from: drone@iamthefij.com
when:
status: [ changed, failure ]
---
kind: pipeline
name: python-3.7
workspace:
base: /app
path: .
steps:
- name: test
image: python:3.7
commands:
- make test-env test
- name: notify
image: drillster/drone-email
settings:
host:
from_secret: SMTP_HOST
username:
from_secret: SMTP_USER
password:
from_secret: SMTP_PASS
from: drone@iamthefij.com
when:
status: [ changed, failure ]
---
kind: pipeline
name: deploy
depends_on:
- python-latest
- python-3.6
- python-3.7
workspace:
base: /app
path: .
steps:
- name: push to docker hub
image: plugins/docker
settings:
repo: iamthefij/minitor
auto_tag: true
username:
from_secret: docker_username
password:
from_secret: docker_password
when:
branch:
- master
event:
- push
- tag
- name: push to pypi
image: python:3
commands:
- make build-env upload
environment:
TWINE_USERNAME:
from_secret: PYPI_USERNAME
TWINE_PASSWORD:
from_secret: PYPI_PASSWORD
when:
event: [ tag ]
- name: notify
image: drillster/drone-email
settings:
host:
from_secret: SMTP_HOST
username:
from_secret: SMTP_USER
password:
from_secret: SMTP_PASS
from: drone@iamthefij.com

1
.gitignore vendored

@ -60,3 +60,4 @@ docs/_build/
target/
config.yml
.mypy_cache/


@ -1,17 +1,15 @@
repos:
- repo: https://github.com/psf/black
rev: 22.3.0
hooks:
- id: black
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v1.2.3
hooks:
- id: trailing-whitespace
- id: end-of-file-fixer
- id: autopep8-wrapper
args:
- -i
- --ignore=E265,E309,E501
- id: debug-statements
language_version: python3
- id: flake8
language_version: python3
- id: check-yaml
args:
- --allow-multiple-documents
@ -19,7 +17,7 @@ repos:
- id: name-tests-test
exclude: tests/(common.py|util.py|(helpers|integration/factories)/(.+).py)
- repo: https://github.com/asottile/reorder_python_imports
sha: v1.0.1
rev: v1.0.1
hooks:
- id: reorder-python-imports
args:


@ -1,9 +1,17 @@
---
dist: xenial
language: python
python:
- "3.4"
- "3.6"
- "3.7"
matrix:
include:
- python: "3.5"
env: TOXENV=py3
- python: "3.6"
env: TOXENV=py3
- python: "3.7"
env: TOXENV=py3
- python: "pypy3.5"
env: TOXENV=pypy3
install:
- pip install tox
script:


@ -1,20 +1,38 @@
FROM python:3
ARG REPO=library
FROM ${REPO}/python:3-alpine
LABEL maintainer="ian@iamthefij.com"
# Minitor: https://git.iamthefij.com/iamthefij/minitor
COPY ./sample-config.yml /app/config.yml
# This should be the target qemu arch
ARG ARCH=x86_64
COPY ./build/qemu-${ARCH}-static /usr/bin/
# Add common checking tools
RUN apk --no-cache add bash=~5.1 curl=~7.80 jq=~1.6
WORKDIR /app
# Add minitor user for running as non-root
RUN addgroup -S minitor && adduser -S minitor -G minitor
# Expose default metrics port
EXPOSE 8080
# Copy default sample config
COPY ./sample-config.yml /app/config.yml
# Copy Python package to container
COPY ./README.md /app/
COPY ./setup.py /app/
COPY ./minitor /app/minitor
RUN pip install -e .
RUN pip install --no-cache-dir -e .
# Copy scripts
COPY ./scripts /app/scripts
# Allow all users to execute minitor and scripts
RUN chmod -R 755 /app
# Drop to non-root user
USER minitor
ENTRYPOINT [ "python3", "-m", "minitor.main" ]

130
Makefile

@ -1,46 +1,66 @@
DOCKER_TAG := minitor-dev
OPEN_CMD := $(shell type xdg-open &> /dev/null && echo 'xdg-open' || echo 'open')
ENV := env
.PHONY: default
default: test
# Builds the python3 venv with all dev requirements
env:
python3 -m venv env
./env/bin/pip install -r requirements-dev.txt
# Create sample config
config.yml:
cp sample-config.yml config.yml
# Runs Minitor
.PHONY: run
run: env
./env/bin/python -m minitor.main -vvv
# Creates virtualenv
$(ENV):
python3 -m venv $(ENV)
# Runs Minitor with metrics
.PHONY: run-metrics
run-metrics: env
./env/bin/python -m minitor.main --metrics
# Install minitor and dependencies in virtualenv
$(ENV)/bin/minitor: $(ENV)
$(ENV)/bin/pip install -r requirements-dev.txt
# Install tox into virtualenv for running tests
$(ENV)/bin/tox: $(ENV)
$(ENV)/bin/pip install tox
# Install wheel for building packages
$(ENV)/bin/wheel: $(ENV)
$(ENV)/bin/pip install wheel
# Install twine for uploading packages
$(ENV)/bin/twine: $(ENV)
$(ENV)/bin/pip install twine
# Installs dev requirements to virtualenv
.PHONY: devenv
devenv: $(ENV)/bin/minitor
# Generates a smaller env for running tox, which builds its own env
.PHONY: test-env
test-env:
python3 -m venv env
./env/bin/pip install tox
# Runs tests with tox
.PHONY: test
test: env
./env/bin/tox
test-env: $(ENV)/bin/tox
# Generates a small build env for building and uploading dists
.PHONY: build-env
build-env:
python3 -m venv env
./env/bin/pip install twine wheel
build-env: $(ENV)/bin/twine $(ENV)/bin/wheel
# Runs Minitor
.PHONY: run
run: $(ENV)/bin/minitor config.yml
$(ENV)/bin/minitor -vvv
# Runs Minitor with metrics
.PHONY: run-metrics
run-metrics: $(ENV)/bin/minitor config.yml
$(ENV)/bin/minitor -vvv --metrics
# Runs tests with tox
.PHONY: test
test: $(ENV)/bin/tox
$(ENV)/bin/tox -e py3
# Builds wheel for package to upload
.PHONY: build
build: env
./env/bin/python setup.py sdist
./env/bin/python setup.py bdist_wheel
build: $(ENV)/bin/wheel
$(ENV)/bin/python setup.py sdist
$(ENV)/bin/python setup.py bdist_wheel
# Verify that the python version matches the git tag so we don't push bad shas
.PHONY: verify-tag-version
@ -50,13 +70,13 @@ verify-tag-version:
# Uses twine to upload to pypi
.PHONY: upload
upload: verify-tag-version build
./env/bin/twine upload dist/*
upload: verify-tag-version build $(ENV)/bin/twine
$(ENV)/bin/twine upload dist/*
# Uses twine to upload to test pypi
.PHONY: upload-test
upload-test: verify-tag-version build
./env/bin/twine upload --repository-url https://test.pypi.org/legacy/ dist/*
upload-test: verify-tag-version build $(ENV)/bin/twine
$(ENV)/bin/twine upload --repository-url https://test.pypi.org/legacy/ dist/*
# Cleans all build, runtime, and test artifacts
.PHONY: clean
@ -68,26 +88,62 @@ clean:
# Cleans dist and env
.PHONY: dist-clean
dist-clean: clean
rm -fr ./dist ./env
rm -fr ./dist $(ENV)
# Install pre-commit hooks
.PHONY: install-hooks
install-hooks: env
./env/bin/tox -e pre-commit -- install -f --install-hooks
install-hooks: $(ENV)
$(ENV)/bin/tox -e pre-commit -- install -f --install-hooks
# Generates test coverage
.coverage:
./env/bin/tox
$(ENV)/bin/tox
# Builds coverage html
htmlcov/index.html: .coverage
./env/bin/coverage html
$(ENV)/bin/coverage html
# Opens coverage html in browser (on macOS and some Linux systems)
.PHONY: open-coverage
open-coverage: htmlcov/index.html
$(OPEN_CMD) htmlcov/index.html
# Docker targets
# Targets to download required qemu binaries for running on an amd64 machine
build/qemu-x86_64-static:
./get_qemu.sh x86_64
build/qemu-arm-static:
./get_qemu.sh arm
build/qemu-aarch64-static:
./get_qemu.sh aarch64
# Build Docker image for host architecture (amd64)
.PHONY: docker-build
docker-build:
docker build . -t $(DOCKER_TAG)
docker-build: build/qemu-x86_64-static
docker build . -t ${DOCKER_TAG}-linux-amd64
# Cross build for arm architectures
.PHONY: docker-cross-build-arm
docker-cross-build-arm: build/qemu-arm-static
docker build --build-arg REPO=arm32v6 --build-arg ARCH=arm . -t ${DOCKER_TAG}-linux-arm
.PHONY: docker-cross-build-arm64
docker-cross-build-arm64: build/qemu-aarch64-static
docker build --build-arg REPO=arm64v8 --build-arg ARCH=aarch64 . -t ${DOCKER_TAG}-linux-arm64
# Run on host architecture
.PHONY: docker-run
docker-run: docker-build config.yml
docker run --rm -v $(shell pwd)/config.yml:/app/config.yml ${DOCKER_TAG}-linux-amd64
# Cross run on host architecture
.PHONY: docker-cross-run-arm
docker-cross-run-arm: docker-cross-build-arm config.yml
docker run --rm -v $(shell pwd)/config.yml:/app/config.yml ${DOCKER_TAG}-linux-arm
.PHONY: docker-cross-run-arm64
docker-cross-run-arm64: docker-cross-build-arm64 config.yml
docker run --rm -v $(shell pwd)/config.yml:/app/config.yml ${DOCKER_TAG}-linux-arm64


@ -2,6 +2,10 @@
A minimal monitoring system
## Important
*This has been more or less replaced by a version written in Go. Check out [minitor-go](/iamthefij/minitor-go)*. There are no known issues with this version, but it is not really maintained anymore as I've migrated to the Go version since it uses fewer system resources.
## What does it do?
Minitor accepts a YAML configuration file with a set of commands to run and a set of alerts to execute when those commands fail. It is designed to be as simple as possible and relies on other command line tools to do checks and issue alerts.
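
For illustration (not part of the diff), the monitor and alert entries in that YAML map onto the `Monitor` and `Alert` classes in `minitor/main.py`; a minimal sketch assuming `minitor` is installed, using the same keys the test suite exercises:

```python
from minitor.main import Alert, Monitor

monitor = Monitor(
    {
        "name": "example",
        "command": ["curl", "-sf", "https://example.com"],
        "alert_down": ["log"],
        "check_interval": 30,
        "alert_after": 4,
        "alert_every": -1,
    }
)
alert = Alert("log", {"command": ["echo", "{alert_message}!"]})
```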
@ -16,14 +20,14 @@ I'm running a few small services and found Sensu, Consul, Nagios, etc. to all be
Install and execute with:
```
```bash
pip install minitor
minitor
```
If locally developing you can use:
```
```bash
make run
```
@ -34,11 +38,17 @@ It will read the contents of `config.yml` and begin its loop. You could also run
You can pull this repository directly from Docker:
`docker pull iamthefij/minitor`
```bash
docker pull iamthefij/minitor
```
The Docker image uses a default `config.yml` that is copied from `sample-config.yml`. This won't really do anything for you, so when you run the Docker image, you should supply your own `config.yml` file:
`docker run -v $PWD/config.yml:/app/config.yml iamthefij/minitor`
```bash
docker run -v $PWD/config.yml:/app/config.yml iamthefij/minitor
```
Images are provided for `amd64`, `arm`, and `arm64` architectures, but the Python package should be compatible with anything that supports Python.
## Configuring
@ -97,7 +107,7 @@ It is also possible to use the metrics endpoint for monitoring Minitor itself! T
To run minitor with metrics, use the `--metrics` (or `-m`) flag. The metrics will be served on port `8080` by default, though it can be overridden using `--metrics-port` (or `-p`).
```
```bash
minitor --metrics
# or
minitor --metrics --metrics-port 3000
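
As an illustration of what that endpoint exposes (example only, not in the diff), the counters and gauge defined in `minitor/main.py` below can be scraped with plain Python once a local Minitor instance is running with `--metrics` on the default port:

```python
# Scrape the Prometheus metrics served by a locally running Minitor (sketch).
from urllib.request import urlopen

body = urlopen("http://localhost:8080").read().decode()
print([line for line in body.splitlines() if "minitor_" in line])
```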


@ -1,3 +0,0 @@
FROM minitor-dev
RUN apt-get update && apt-get install -y jq curl && apt-get clean


@ -1,7 +1,7 @@
version: '2'
services:
minitor:
build: .
build: ../..
volumes:
- ./config.yml:/app/config.yml
- /var/run/docker.sock:/var/run/docker.sock:ro

14
get_qemu.sh Executable file

@ -0,0 +1,14 @@
#! /bin/bash
HOST_ARCH=x86_64
VERSION=v2.9.1-1
mkdir -p build
cd build
# Multiple args can be passed in, but in most cases (Makefile and .drone.yml) we only use one at a time
for target_arch in $*; do
wget https://github.com/multiarch/qemu-user-static/releases/download/$VERSION/${HOST_ARCH}_qemu-${target_arch}-static.tar.gz
tar -xvf ${HOST_ARCH}_qemu-${target_arch}-static.tar.gz
rm ${HOST_ARCH}_qemu-${target_arch}-static.tar.gz
done

25
manifest.tmpl Normal file

@ -0,0 +1,25 @@
image: iamthefij/minitor:{{#if build.tag}}{{trimPrefix "v" build.tag}}{{else}}latest{{/if}}
{{#if build.tags}}
tags:
{{#each build.tags}}
- {{this}}
{{/each}}
{{/if}}
manifests:
-
image: iamthefij/minitor:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-amd64
platform:
architecture: amd64
os: linux
-
image: iamthefij/minitor:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-arm64
platform:
architecture: arm64
os: linux
variant: v8
-
image: iamthefij/minitor:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-arm
platform:
architecture: arm
os: linux
variant: v7


@ -16,15 +16,14 @@ from prometheus_client import start_http_server
DEFAULT_METRICS_PORT = 8080
logging.basicConfig(
level=logging.ERROR,
format='%(asctime)s %(levelname)s %(name)s %(message)s'
level=logging.ERROR, format="%(asctime)s %(levelname)s %(name)s %(message)s"
)
logging.getLogger(__name__).addHandler(logging.NullHandler())
def read_yaml(path):
"""Loads config from a YAML file with env interpolation"""
with open(path, 'r') as yaml:
with open(path, "r") as yaml:
contents = yaml.read()
return yamlenv.load(contents)
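
A quick sketch of the interpolation this enables (illustration only; assumes yamlenv's `${VAR}` substitution syntax, which the sample config and the environment variables set in the test suite rely on):

```python
# Sketch of read_yaml-style env interpolation via yamlenv (assumed ${VAR} syntax).
import os

import yamlenv

os.environ["MY_PHONE"] = "555-555-0505"
print(yamlenv.load("phone: ${MY_PHONE}"))  # expected: {'phone': '555-555-0505'}
```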
@ -35,44 +34,40 @@ def validate_monitor_settings(settings):
Note: Cannot yet validate the Alerts exist from within this class.
That will be done by Minitor later
"""
name = settings.get('name')
name = settings.get("name")
if not name:
raise InvalidMonitorException('Invalid name for monitor')
if not settings.get('command'):
raise InvalidMonitorException(
'Invalid command for monitor {}'.format(name)
)
raise InvalidMonitorException("Invalid name for monitor")
if not settings.get("command"):
raise InvalidMonitorException("Invalid command for monitor {}".format(name))
type_assertions = (
('check_interval', int),
('alert_after', int),
('alert_every', int),
("check_interval", int),
("alert_after", int),
("alert_every", int),
)
for key, val_type in type_assertions:
val = settings.get(key)
if not isinstance(val, val_type):
raise InvalidMonitorException(
'Invalid type on {}: {}. Expected {} and found {}'.format(
"Invalid type on {}: {}. Expected {} and found {}".format(
name, key, val_type.__name__, type(val).__name__
)
)
non_zero = (
'check_interval',
'alert_after',
"check_interval",
"alert_after",
)
for key in non_zero:
if settings.get(key) == 0:
raise InvalidMonitorException(
'Invalid value for {}: {}. Value cannot be 0'.format(
name, key
)
"Invalid value for {}: {}. Value cannot be 0".format(name, key)
)
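
For illustration (not part of the changeset), the checks above reject zero or mis-typed values, which is exactly what `tests/test_monitor.py` further down parametrizes:

```python
import pytest

from minitor.main import InvalidMonitorException, validate_monitor_settings

settings = {
    "name": "example",
    "command": ["true"],
    "check_interval": 0,  # zero interval is rejected by the non_zero check
    "alert_after": 1,
    "alert_every": 1,
}
with pytest.raises(InvalidMonitorException):
    validate_monitor_settings(settings)
```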
def maybe_decode(bstr, encoding='utf-8'):
def maybe_decode(bstr, encoding="utf-8"):
try:
return bstr.decode(encoding)
except TypeError:
@ -82,14 +77,14 @@ def maybe_decode(bstr, encoding='utf-8'):
def call_output(*popenargs, **kwargs):
"""Similar to check_output, but instead returns output and exception"""
# So we can capture complete output, redirect sderr to stdout
kwargs.setdefault('stderr', subprocess.STDOUT)
kwargs.setdefault("stderr", subprocess.STDOUT)
output, ex = None, None
try:
output = check_output(*popenargs, **kwargs)
except CalledProcessError as e:
output, ex = e.output, e
output = output.rstrip(b'\n')
output = output.rstrip(b"\n")
return output, ex
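
A usage sketch (example only, not in the diff), mirroring the assertions in `tests/test_minitor.py` below:

```python
from minitor.main import call_output

output, ex = call_output(["echo", "test"])
assert output == b"test" and ex is None

output, ex = call_output(["ls", "--not-a-real-flag"])
assert output.startswith(b"ls: ") and ex is not None  # CalledProcessError is captured
```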
@ -113,23 +108,23 @@ class Monitor(object):
def __init__(self, config, counter=None, logger=None):
"""Accepts a dictionary of configuration items to override defaults"""
settings = {
'alerts': ['log'],
'check_interval': 30,
'alert_after': 4,
'alert_every': -1,
"alerts": ["log"],
"check_interval": 30,
"alert_after": 4,
"alert_every": -1,
}
settings.update(config)
validate_monitor_settings(settings)
self.name = settings['name']
self.command = settings['command']
self.alert_down = settings.get('alert_down', [])
self.name = settings["name"]
self.command = settings["command"]
self.alert_down = settings.get("alert_down", [])
if not self.alert_down:
self.alert_down = settings.get('alerts', [])
self.alert_up = settings.get('alert_up', [])
self.check_interval = settings.get('check_interval')
self.alert_after = settings.get('alert_after')
self.alert_every = settings.get('alert_every')
self.alert_down = settings.get("alerts", [])
self.alert_up = settings.get("alert_up", [])
self.check_interval = settings.get("check_interval")
self.alert_after = settings.get("alert_after")
self.alert_every = settings.get("alert_every")
self.alert_count = 0
self.last_check = None
@ -140,18 +135,18 @@ class Monitor(object):
self._counter = counter
if logger is None:
self._logger = logging.getLogger(
'{}({})'.format(self.__class__.__name__, self.name)
"{}({})".format(self.__class__.__name__, self.name)
)
else:
self._logger = logger.getChild(
'{}({})'.format(self.__class__.__name__, self.name)
"{}({})".format(self.__class__.__name__, self.name)
)
def _count_check(self, is_success=True, is_alert=False):
if self._counter is not None:
self._counter.labels(
monitor=self.name,
status=('success' if is_success else 'failure'),
status=("success" if is_success else "failure"),
is_alert=is_alert,
).inc()
@ -199,7 +194,7 @@ class Monitor(object):
back_up = None
if not self.is_up():
back_up = MinitorAlert(
'{} check is up again!'.format(self.name),
"{} check is up again!".format(self.name),
self,
)
self.total_failure_count = 0
@ -215,7 +210,7 @@ class Monitor(object):
if self.total_failure_count < self.alert_after:
return
failure_count = (self.total_failure_count - self.alert_after)
failure_count = self.total_failure_count - self.alert_after
if self.alert_every > 0:
# Otherwise, we should check against our alert_every
should_alert = (failure_count % self.alert_every) == 0
@ -223,15 +218,15 @@ class Monitor(object):
# Only alert on the first failure
should_alert = failure_count == 1
else:
should_alert = (failure_count >= (2 ** self.alert_count) - 1)
should_alert = failure_count >= (2**self.alert_count) - 1
if should_alert:
self.alert_count += 1
raise MinitorAlert(
'{} check has failed {} times'.format(
"{} check has failed {} times".format(
self.name, self.total_failure_count
),
self
self,
)
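
To make the back-off concrete (illustration only, not part of the changeset): with `alert_after=4` and the default `alert_every=-1`, the exponential branch fires on consecutive failures 4, 5, 7, 11, 19, and so on. A standalone sketch of that schedule:

```python
# Replays the exponential-backoff branch above for alert_after=4, alert_every=-1.
alert_after, alert_count = 4, 0
fired = []
for total_failure_count in range(1, 25):
    if total_failure_count < alert_after:
        continue
    failure_count = total_failure_count - alert_after
    if failure_count >= (2**alert_count) - 1:
        alert_count += 1
        fired.append(total_failure_count)
print(fired)  # [4, 5, 7, 11, 19]
```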
def is_up(self):
@ -243,18 +238,18 @@ class Alert(object):
def __init__(self, name, config, counter=None, logger=None):
"""An alert must be named and have a config dict"""
self.name = name
self.command = config.get('command')
self.command = config.get("command")
if not self.command:
raise InvalidAlertException('Invalid alert {}'.format(self.name))
raise InvalidAlertException("Invalid alert {}".format(self.name))
self._counter = counter
if logger is None:
self._logger = logging.getLogger(
'{}({})'.format(self.__class__.__name__, self.name)
"{}({})".format(self.__class__.__name__, self.name)
)
else:
self._logger = logger.getChild(
'{}({})'.format(self.__class__.__name__, self.name)
"{}({})".format(self.__class__.__name__, self.name)
)
def _count_alert(self, monitor):
@ -277,7 +272,7 @@ class Alert(object):
def _format_datetime(self, dt):
"""Formats a datetime for an alert"""
if dt is None:
return 'Never'
return "Never"
return dt.isoformat()
def alert(self, message, monitor):
@ -313,64 +308,72 @@ class Minitor(object):
def _parse_args(self, args=None):
"""Parses command line arguments and returns them"""
parser = ArgumentParser(description='Minimal monitoring')
parser = ArgumentParser(description="Minimal monitoring")
parser.add_argument(
'--config', '-c',
dest='config_path',
default='config.yml',
help='Path to the config YAML file to use',
"--config",
"-c",
dest="config_path",
default="config.yml",
help="Path to the config YAML file to use",
)
parser.add_argument(
'--metrics', '-m',
dest='metrics',
action='store_true',
help='Start webserver with metrics',
"--metrics",
"-m",
dest="metrics",
action="store_true",
help="Start webserver with metrics",
)
parser.add_argument(
'--metrics-port', '-p',
dest='metrics_port',
"--metrics-port",
"-p",
dest="metrics_port",
type=int,
default=DEFAULT_METRICS_PORT,
help='Port to use when serving metrics',
help="Port to use when serving metrics",
)
parser.add_argument(
'--verbose', '-v',
action='count',
help=('Adjust log verbosity by increasing arg count. Default log',
'level is ERROR. Level increases with each `v`'),
"--verbose",
"-v",
action="count",
help=(
"Adjust log verbosity by increasing arg count. Default log",
"level is ERROR. Level increases with each `v`",
),
)
return parser.parse_args(args)
def _setup(self, config_path):
"""Load all setup from YAML file at provided path"""
config = read_yaml(config_path)
self.check_interval = config.get('check_interval', 30)
self.check_interval = config.get("check_interval", 30)
self.monitors = [
Monitor(
mon,
counter=self._monitor_counter,
logger=self._logger,
)
for mon in config.get('monitors', [])
for mon in config.get("monitors", [])
]
# Add default alert for logging
self.alerts = {
'log': Alert(
'log',
{'command': ['echo', '{alert_message}!']},
"log": Alert(
"log",
{"command": ["echo", "{alert_message}!"]},
counter=self._alert_counter,
logger=self._logger,
)
}
self.alerts.update({
alert_name: Alert(
alert_name,
alert,
counter=self._alert_counter,
logger=self._logger,
)
for alert_name, alert in config.get('alerts', {}).items()
})
self.alerts.update(
{
alert_name: Alert(
alert_name,
alert,
counter=self._alert_counter,
logger=self._logger,
)
for alert_name, alert in config.get("alerts", {}).items()
}
)
def _validate_monitors(self):
"""Validates monitors are valid against other config values"""
@ -378,7 +381,7 @@ class Minitor(object):
# Validate that the interval is valid
if monitor.check_interval < self.check_interval:
raise InvalidMonitorException(
'Monitor {} check interval is lower global value {}'.format(
"Monitor {} check interval is lower global value {}".format(
monitor.name, self.check_interval
)
)
@ -386,26 +389,26 @@ class Minitor(object):
for alert in chain(monitor.alert_down, monitor.alert_up):
if alert not in self.alerts:
raise InvalidMonitorException(
'Monitor {} contains an unknown alert: {}'.format(
"Monitor {} contains an unknown alert: {}".format(
monitor.name, alert
)
)
def _init_metrics(self):
self._alert_counter = Counter(
'minitor_alert_total',
'Number of Minitor alerts',
['alert', 'monitor'],
"minitor_alert_total",
"Number of Minitor alerts",
["alert", "monitor"],
)
self._monitor_counter = Counter(
'minitor_check_total',
'Number of Minitor checks',
['monitor', 'status', 'is_alert'],
"minitor_check_total",
"Number of Minitor checks",
["monitor", "status", "is_alert"],
)
self._monitor_status_gauge = Gauge(
'minitor_monitor_up_count',
'Currently responsive monitors',
['monitor'],
"minitor_monitor_up_count",
"Currently responsive monitors",
["monitor"],
)
def _loop(self):
@ -420,9 +423,7 @@ class Minitor(object):
result = monitor.check()
if result is not None:
self._logger.info(
'%s: %s',
monitor.name,
'SUCCESS' if result else 'FAILURE'
"%s: %s", monitor.name, "SUCCESS" if result else "FAILURE"
)
except MinitorAlert as minitor_alert:
self._logger.warning(minitor_alert)
@ -475,5 +476,5 @@ def main(args=None):
return 0
if __name__ == '__main__':
if __name__ == "__main__":
sys.exit(main())


@ -7,19 +7,32 @@ set -e
# Checks the most recent state exit code of a Docker container
#################
container_name=$1
# Docker host will default to a socket
# To override, export DOCKER_HOST to a new hostname
DOCKER_HOST="${DOCKER_HOST:=socket}"
container_name="$1"
# Curls Docker either using a socket or URL
function curl_docker {
local path="$1"
if [ "$DOCKER_HOST" == "socket" ]; then
curl --unix-socket /var/run/docker.sock "http://localhost/$path" 2>/dev/null
else
curl "http://${DOCKER_HOST}/$path" 2>/dev/null
fi
}
# Returns container ID for a given container name
function get_container_id {
local container_name=$1
curl --unix-socket /var/run/docker.sock 'http://localhost/containers/json?all=1' 2>/dev/null \
local container_name="$1"
curl_docker 'containers/json?all=1' \
| jq -r ".[] | {Id, Name: .Names[]} | select(.Name == \"/${container_name}\") | .Id"
}
# Returns container JSON
function inspect_container {
local container_id=$1
curl --unix-socket /var/run/docker.sock http://localhost/containers/$container_id/json 2>/dev/null
curl_docker "containers/$container_id/json"
}
if [ -z "$container_name" ]; then
@ -33,6 +46,6 @@ if [ -z "$container_id" ]; then
echo "ERROR: Could not find container with name: $container_name"
exit 1
fi
exit_code=$(inspect_container $container_id | jq -r .State.ExitCode)
exit_code=$(inspect_container "$container_id" | jq -r .State.ExitCode)
exit $exit_code
exit "$exit_code"


@ -1,4 +1,5 @@
#! /bin/bash
set -e
#################
# docker_healthcheck.sh
@ -6,19 +7,32 @@
# Returns the results of a Docker Healthcheck for a container
#################
container_name=$1
# Docker host will default to a socket
# To override, export DOCKER_HOST to a new hostname
DOCKER_HOST="${DOCKER_HOST:=socket}"
container_name="$1"
# Curls Docker either using a socket or URL
function curl_docker {
local path="$1"
if [ "$DOCKER_HOST" == "socket" ]; then
curl --unix-socket /var/run/docker.sock "http://localhost/$path" 2>/dev/null
else
curl "http://${DOCKER_HOST}/$path" 2>/dev/null
fi
}
# Returns container ID for a given container name
function get_container_id {
local container_name=$1
curl --unix-socket /var/run/docker.sock 'http://localhost/containers/json?all=1' 2>/dev/null \
local container_name="$1"
curl_docker 'containers/json?all=1' \
| jq -r ".[] | {Id, Name: .Names[]} | select(.Name == \"/${container_name}\") | .Id"
}
# Returns container JSON
function inspect_container {
local container_id=$1
curl --unix-socket /var/run/docker.sock http://localhost/containers/$container_id/json 2>/dev/null
local container_id="$1"
curl_docker "containers/$container_id/json"
}
if [ -z "$container_name" ]; then
@ -27,18 +41,18 @@ if [ -z "$container_name" ]; then
exit 1
fi
container_id=$(get_container_id $container_name)
container_id=$(get_container_id "$container_name")
if [ -z "$container_id" ]; then
echo "ERROR: Could not find container with name: $container_name"
exit 1
fi
health=$(inspect_container $container_id | jq -r '.State.Health.Status')
health=$(inspect_container "$container_id" | jq -r '.State.Health.Status')
case $health in
"null")
case "$health" in
null)
echo "No healthcheck results"
;;
"starting|healthy")
starting|healthy)
echo "Status: '$health'"
;;
*)


@ -7,48 +7,49 @@ from setuptools import setup
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
with open(path.join(here, "README.md"), encoding="utf-8") as f:
long_description = f.read()
setup(
name='minitor',
version='1.0.0',
description='A minimal monitoring tool',
name="minitor",
version="1.0.3",
description="A minimal monitoring tool",
long_description=long_description,
long_description_content_type='text/markdown',
url='https://git.iamthefij.com/iamthefij/minitor',
download_url=(
'https://git.iamthefij.com/iamthefij/minitor/archive/master.tar.gz'
),
author='Ian Fijolek',
author_email='ian@iamthefij.com',
long_description_content_type="text/markdown",
url="https://git.iamthefij.com/iamthefij/minitor",
download_url=("https://git.iamthefij.com/iamthefij/minitor/archive/master.tar.gz"),
author="Ian Fijolek",
author_email="ian@iamthefij.com",
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Topic :: System :: Monitoring',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"Topic :: System :: Monitoring",
"License :: OSI Approved :: Apache Software License",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
],
keywords='minitor monitoring alerting',
packages=find_packages(exclude=[
'contrib',
'docs',
'examples',
'scripts',
'tests',
]),
keywords="minitor monitoring alerting",
packages=find_packages(
exclude=[
"contrib",
"docs",
"examples",
"scripts",
"tests",
]
),
install_requires=[
'prometheus_client',
'yamlenv',
"prometheus_client",
"yamlenv",
],
entry_points={
'console_scripts': [
'minitor=minitor.main:main',
"console_scripts": [
"minitor=minitor.main:main",
],
},
)

0
tests/__init__.py Normal file


@ -5,56 +5,51 @@ import pytest
from minitor.main import Alert
from minitor.main import Monitor
from tests.util import assert_called_once_with
class TestAlert(object):
@pytest.fixture
def monitor(self):
return Monitor({
'name': 'Dummy Monitor',
'command': ['echo', 'foo'],
})
return Monitor(
{
"name": "Dummy Monitor",
"command": ["echo", "foo"],
}
)
@pytest.fixture
def echo_alert(self):
return Alert(
'log',
"log",
{
'command': [
'echo', (
'{monitor_name} has failed {failure_count} time(s)!\n'
'We have alerted {alert_count} time(s)\n'
'Last success was {last_success}\n'
'Last output was: {last_output}'
)
"command": [
"echo",
(
"{monitor_name} has failed {failure_count} time(s)!\n"
"We have alerted {alert_count} time(s)\n"
"Last success was {last_success}\n"
"Last output was: {last_output}"
),
]
}
},
)
@pytest.mark.parametrize(
'last_success,expected_success',
[
(None, 'Never'),
(datetime(2018, 4, 10), '2018-04-10T00:00:00')
]
"last_success,expected_success",
[(None, "Never"), (datetime(2018, 4, 10), "2018-04-10T00:00:00")],
)
def test_simple_alert(
self,
monitor,
echo_alert,
last_success,
expected_success
):
def test_simple_alert(self, monitor, echo_alert, last_success, expected_success):
monitor.alert_count = 1
monitor.last_output = 'beep boop'
monitor.last_output = "beep boop"
monitor.last_success = last_success
monitor.total_failure_count = 1
with patch.object(echo_alert._logger, 'error') as mock_error:
echo_alert.alert('Exception message', monitor)
mock_error.assert_called_once_with(
'Dummy Monitor has failed 1 time(s)!\n'
'We have alerted 1 time(s)\n'
'Last success was ' + expected_success + '\n'
'Last output was: beep boop'
)
with patch.object(echo_alert._logger, "error") as mock_error:
echo_alert.alert("Exception message", monitor)
assert_called_once_with(
mock_error,
"Dummy Monitor has failed 1 time(s)!\n"
"We have alerted 1 time(s)\n"
"Last success was " + expected_success + "\n"
"Last output was: beep boop",
)


@ -6,30 +6,31 @@ from minitor.main import Minitor
class TestMinitor(object):
def test_call_output(self):
# valid command should have result and no exception
output, ex = call_output(['echo', 'test'])
assert output == b'test'
output, ex = call_output(["echo", "test"])
assert output == b"test"
assert ex is None
output, ex = call_output(['ls', '--not-a-real-flag'])
assert output.startswith(b'ls: ')
output, ex = call_output(["ls", "--not-a-real-flag"])
assert output.startswith(b"ls: ")
assert ex is not None
def test_run(self):
"""Doesn't really check much, but a simple integration sanity test"""
test_loop_count = 5
os.environ.update({
'MAILGUN_API_KEY': 'test-mg-key',
'AVAILABLE_NUMBER': '555-555-5050',
'MY_PHONE': '555-555-0505',
'ACCOUNT_SID': 'test-account-id',
'AUTH_TOKEN': 'test-account-token',
})
args = '--config ./sample-config.yml'.split(' ')
os.environ.update(
{
"MAILGUN_API_KEY": "test-mg-key",
"AVAILABLE_NUMBER": "555-555-5050",
"MY_PHONE": "555-555-0505",
"ACCOUNT_SID": "test-account-id",
"AUTH_TOKEN": "test-account-token",
}
)
args = "--config ./sample-config.yml".split(" ")
minitor = Minitor()
with patch.object(minitor, '_loop'):
with patch.object(minitor, "_loop"):
minitor.run(args)
# Skip the loop, but run a single check
for _ in range(test_loop_count):


@ -7,43 +7,48 @@ from minitor.main import InvalidMonitorException
from minitor.main import MinitorAlert
from minitor.main import Monitor
from minitor.main import validate_monitor_settings
from tests.util import assert_called_once
class TestMonitor(object):
@pytest.fixture
def monitor(self):
return Monitor({
'name': 'Sample Monitor',
'command': ['echo', 'foo'],
'alert_down': ['log'],
'alert_up': ['log'],
'check_interval': 1,
'alert_after': 1,
'alert_every': 1,
})
return Monitor(
{
"name": "Sample Monitor",
"command": ["echo", "foo"],
"alert_down": ["log"],
"alert_up": ["log"],
"check_interval": 1,
"alert_after": 1,
"alert_every": 1,
}
)
@pytest.mark.parametrize('settings', [
{'alert_after': 0},
{'alert_every': 0},
{'check_interval': 0},
{'alert_after': 'invalid'},
{'alert_every': 'invalid'},
{'check_interval': 'invalid'},
])
@pytest.mark.parametrize(
"settings",
[
{"alert_after": 0},
{"alert_every": 0},
{"check_interval": 0},
{"alert_after": "invalid"},
{"alert_every": "invalid"},
{"check_interval": "invalid"},
],
)
def test_monitor_invalid_configuration(self, settings):
with pytest.raises(InvalidMonitorException):
validate_monitor_settings(settings)
@pytest.mark.parametrize(
'alert_after',
"alert_after",
[1, 20],
ids=lambda arg: 'alert_after({})'.format(arg),
ids=lambda arg: "alert_after({})".format(arg),
)
@pytest.mark.parametrize(
'alert_every',
"alert_every",
[-1, 1, 2, 1440],
ids=lambda arg: 'alert_every({})'.format(arg),
ids=lambda arg: "alert_every({})".format(arg),
)
def test_monitor_alert_after(self, monitor, alert_after, alert_every):
monitor.alert_after = alert_after
@ -58,14 +63,14 @@ class TestMonitor(object):
monitor.failure()
@pytest.mark.parametrize(
'alert_after',
"alert_after",
[1, 20],
ids=lambda arg: 'alert_after({})'.format(arg),
ids=lambda arg: "alert_after({})".format(arg),
)
@pytest.mark.parametrize(
'alert_every',
"alert_every",
[1, 2, 1440],
ids=lambda arg: 'alert_every({})'.format(arg),
ids=lambda arg: "alert_every({})".format(arg),
)
def test_monitor_alert_every(self, monitor, alert_after, alert_every):
monitor.alert_after = alert_after
@ -101,27 +106,27 @@ class TestMonitor(object):
else:
monitor.failure()
@pytest.mark.parametrize('last_check', [None, datetime(2018, 4, 10)])
@pytest.mark.parametrize("last_check", [None, datetime(2018, 4, 10)])
def test_monitor_should_check(self, monitor, last_check):
monitor.last_check = last_check
assert monitor.should_check()
def test_monitor_check_fail(self, monitor):
assert monitor.last_output is None
with patch.object(monitor, 'failure') as mock_failure:
monitor.command = ['ls', '--not-real']
with patch.object(monitor, "failure") as mock_failure:
monitor.command = ["ls", "--not-real"]
assert not monitor.check()
mock_failure.assert_called_once()
assert_called_once(mock_failure)
assert monitor.last_output is not None
def test_monitor_check_success(self, monitor):
assert monitor.last_output is None
with patch.object(monitor, 'success') as mock_success:
with patch.object(monitor, "success") as mock_success:
assert monitor.check()
mock_success.assert_called_once()
assert_called_once(mock_success)
assert monitor.last_output is not None
@pytest.mark.parametrize('failure_count', [0, 1])
@pytest.mark.parametrize("failure_count", [0, 1])
def test_monitor_success(self, monitor, failure_count):
monitor.alert_count = 0
monitor.total_failure_count = failure_count

12
tests/util.py Normal file

@ -0,0 +1,12 @@
from unittest import mock
def assert_called_once(mocked):
"""Safe convenient methods for mock asserts"""
assert mocked.call_count == 1
def assert_called_once_with(mocked, *args, **kwargs):
"""Safe convenient methods for mock asserts"""
assert_called_once(mocked)
assert mocked.call_args == mock.call(*args, **kwargs)
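
A usage sketch for these helpers (illustration only; assumes it is run from the repository root so `tests` is importable):

```python
from unittest import mock

from tests.util import assert_called_once, assert_called_once_with

m = mock.Mock()
m("hello", key=1)
assert_called_once(m)
assert_called_once_with(m, "hello", key=1)
```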


@ -1,5 +1,5 @@
[tox]
envlist = py3
envlist = py3,pypy3
[testenv]
deps =