Initial commit
commit c1fa165e66
5 changed files with 289 additions and 0 deletions
.dockerignore (Normal file, 4 lines)
@@ -0,0 +1,4 @@
.git
Dockerfile
*~
README.md
.gitignore (vendored, Normal file, 125 lines)
@@ -0,0 +1,125 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
.python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# celery beat schedule file
celerybeat-schedule

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/
Dockerfile (Normal file, 27 lines)
@@ -0,0 +1,27 @@
FROM python:3.8-buster

LABEL maintainer="Aiden Gilmartin" \
      description="Speedtest to InfluxDB data bridge"

# Install dependencies
ENV DEBIAN_FRONTEND=noninteractive
RUN apt-get update
RUN apt-get -q -y install --no-install-recommends apt-utils gnupg1 apt-transport-https dirmngr

# Install speedtest-cli
RUN apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 379CE192D401AB61
RUN echo "deb https://ookla.bintray.com/debian buster main" | tee /etc/apt/sources.list.d/speedtest.list
RUN apt-get update && apt-get -q -y install speedtest

# Install Python packages
RUN pip install influxdb

# Clean up
RUN apt-get -q -y autoremove
RUN apt-get -q -y clean
RUN rm -rf /var/lib/apt/lists/*

# Final setup & execution
COPY . /app
WORKDIR /app
CMD ["python3", "-u", "main.py"]
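The image built here only needs the `speedtest` binary on the PATH and the `influxdb` Python package to be importable. A quick way to check an image (or a host install) is a short sketch like the one below; it is not one of the committed files and simply mirrors the subprocess call that main.py makes:

```python
import subprocess

# Invoke the Speedtest CLI exactly the way main.py does. A return code of 0
# and JSON on stdout means the CLI install in the image (or on the host) works.
result = subprocess.run(
    ["speedtest", "--accept-license", "--accept-gdpr", "-f", "json"],
    capture_output=True)

print(result.returncode)
print(result.stdout[:200])  # start of the JSON payload, if the test succeeded
```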
README.md (Normal file, 35 lines)
@@ -0,0 +1,35 @@
# speedtest-influx

This is a small Python script that will continuously run the OOKLA Speedtest CLI application, reformat the data output, and forward it to an InfluxDB database.

You may want to do this so that you can track your internet connection's consistency over time. Using Grafana, you can view and explore this data easily.

![Grafana Dashboard](https://i.imgur.com/8cUdMy7.png)

## Using the script

### Option 1 - No Container

1. [Install the OOKLA Speedtest CLI application.](https://www.speedtest.net/apps/cli)

2. Install the InfluxDB client library for Python.

   `pip install influxdb`

3. Run the script.

   `python3 ./main.py`

### Option 2 - Run with Docker/Podman

1. Build the container.

   `docker build -t aidengilmartin/speedtest-influx ./`

   `podman build -t aidengilmartin/speedtest-influx ./`

2. Run the container.

   `docker run -d --name speedtest-influx aidengilmartin/speedtest-influx`

   `podman run -d --name speedtest-influx aidengilmartin/speedtest-influx`
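Whichever option you choose, the script only works once the InfluxDB settings at the top of main.py point at a reachable server. A minimal pre-flight sketch (not part of the commit; it reuses main.py's placeholder values and the same `get_list_database()` call that `init_db()` makes):

```python
from influxdb import InfluxDBClient

# Placeholder settings copied from main.py; substitute your own host and credentials.
client = InfluxDBClient('db_hostname.network', 8086, 'db_username', 'db_password', None)

# If this call returns a list of databases, main.py's init_db() should also succeed.
print(client.get_list_database())
```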
main.py (Executable file, 98 lines)
@@ -0,0 +1,98 @@
import time
import json
import subprocess

from influxdb import InfluxDBClient

# InfluxDB Settings
DB_ADDRESS = 'db_hostname.network'
DB_PORT = 8086
DB_USER = 'db_username'
DB_PASSWORD = 'db_password'
DB_DATABASE = 'speedtest_db'

# Speedtest Settings
TEST_INTERVAL = 900  # Time between tests (in seconds).
TEST_FAIL_INTERVAL = 60  # Time before retrying a failed Speedtest (in seconds).

influxdb_client = InfluxDBClient(
    DB_ADDRESS, DB_PORT, DB_USER, DB_PASSWORD, None)


def init_db():
    databases = influxdb_client.get_list_database()

    if len(list(filter(lambda x: x['name'] == DB_DATABASE, databases))) == 0:
        influxdb_client.create_database(
            DB_DATABASE)  # Create the database if it does not exist.

    influxdb_client.switch_database(DB_DATABASE)  # Use it for all later writes.


def format_for_influx(cliout):
    data = json.loads(cliout)
    # There is additional data in the speedtest-cli output but it is likely not necessary to store.
    influx_data = [
        {
            'measurement': 'ping',
            'time': data['timestamp'],
            'fields': {
                'jitter': data['ping']['jitter'],
                'latency': data['ping']['latency']
            }
        },
        {
            'measurement': 'download',
            'time': data['timestamp'],
            'fields': {
                # Byte to Megabit
                'bandwidth': data['download']['bandwidth'] / 125000,
                'bytes': data['download']['bytes'],
                'elapsed': data['download']['elapsed']
            }
        },
        {
            'measurement': 'upload',
            'time': data['timestamp'],
            'fields': {
                # Byte to Megabit
                'bandwidth': data['upload']['bandwidth'] / 125000,
                'bytes': data['upload']['bytes'],
                'elapsed': data['upload']['elapsed']
            }
        },
        {
            'measurement': 'packetLoss',
            'time': data['timestamp'],
            'fields': {
                'packetLoss': data['packetLoss']
            }
        }
    ]

    return influx_data


def main():
    init_db()  # Set up the database if it does not already exist.

    while True:  # Run a Speedtest and send the results to InfluxDB indefinitely.
        speedtest = subprocess.run(
            ["speedtest", "--accept-license", "--accept-gdpr", "-f", "json"], capture_output=True)

        if speedtest.returncode == 0:  # Speedtest was successful.
            data = format_for_influx(speedtest.stdout)
            print("Speedtest Successful:")
            if influxdb_client.write_points(data):
                print("Data written to DB successfully")
            time.sleep(TEST_INTERVAL)
        else:  # Speedtest failed.
            print("Speedtest Failed:")
            print(speedtest.stderr)
            print(speedtest.stdout)
            time.sleep(TEST_FAIL_INTERVAL)


if __name__ == '__main__':
    print('Speedtest CLI Data Logger to InfluxDB')
    main()
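To see what format_for_influx() produces without touching a database, it can be fed a hand-built payload containing just the fields the script reads from the CLI output. A sketch with illustrative values (assumes main.py is importable and the `influxdb` package is installed; importing main constructs the client object but should not contact the server):

```python
import json

from main import format_for_influx

# Illustrative values only; the keys are the ones main.py actually reads.
sample = {
    'timestamp': '2020-01-01T00:00:00Z',
    'ping': {'jitter': 1.2, 'latency': 10.5},
    'download': {'bandwidth': 12500000, 'bytes': 100000000, 'elapsed': 8000},
    'upload': {'bandwidth': 2500000, 'bytes': 20000000, 'elapsed': 8000},
    'packetLoss': 0.0,
}

points = format_for_influx(json.dumps(sample))

# The CLI reports bandwidth in bytes per second; dividing by 125000 converts to megabits per second.
print(points[1]['fields']['bandwidth'])  # 100.0 Mbit/s for the illustrative download value above
```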