Commit f30202e8 authored by hukx.michael's avatar hukx.michael

init

parents
[run]
branch = True
source = prometheus_client
omit =
prometheus_client/decorator.py
[paths]
source =
prometheus_client
.tox/*/lib/python*/site-packages/prometheus_client
.tox/pypy/site-packages/prometheus_client
[report]
show_missing = True
\ No newline at end of file
build
dist
*.egg-info
*.pyc
*.swp
.coverage.*
.coverage
.tox
sudo: false
cache:
directories:
- $HOME/.cache/pip
# Pending https://github.com/travis-ci/travis-ci/issues/5027
before_install:
- |
if [ "$TRAVIS_PYTHON_VERSION" = "pypy" ]; then
export PYENV_ROOT="$HOME/.pyenv"
if [ -f "$PYENV_ROOT/bin/pyenv" ]; then
cd "$PYENV_ROOT" && git pull
else
rm -rf "$PYENV_ROOT" && git clone --depth 1 https://github.com/yyuu/pyenv.git "$PYENV_ROOT"
fi
export PYPY_VERSION="4.0.1"
"$PYENV_ROOT/bin/pyenv" install "pypy-$PYPY_VERSION"
virtualenv --python="$PYENV_ROOT/versions/pypy-$PYPY_VERSION/bin/python" "$HOME/virtualenvs/pypy-$PYPY_VERSION"
source "$HOME/virtualenvs/pypy-$PYPY_VERSION/bin/activate"
fi
language: python
matrix:
include:
- python: "2.6"
env: TOXENV=py26
- python: "2.7"
env: TOXENV=py27
- python: "2.7"
env: TOXENV=py27-nooptionals
- python: "3.4"
env: TOXENV=py34
- python: "3.5"
env: TOXENV=py35
- python: "3.5"
env: TOXENV=py35-nooptionals
- python: "pypy"
env: TOXENV=pypy
install:
- pip install tox
script:
- tox
notifications:
email: false
# Contributing
Prometheus uses GitHub to manage reviews of pull requests.
* If you have a trivial fix or improvement, go ahead and create a pull request,
addressing (with `@...`) the maintainer of this repository (see
[MAINTAINERS.md](MAINTAINERS.md)) in the description of the pull request.
* If you plan to do something more involved, first discuss your ideas
on our [mailing list](https://groups.google.com/forum/?fromgroups#!forum/prometheus-developers).
This will avoid unnecessary work and surely give you and us a good deal
of inspiration.
This diff is collapsed.
* Brian Brazil <brian.brazil@robustperception.io>
Prometheus instrumentation library for Python applications
Copyright 2015 The Prometheus Authors
This product bundles decorator 4.0.10 which is available under a "2-clause BSD"
license. For details, see prometheus_client/decorator.py.
This diff is collapsed.
#!/usr/bin/python
from . import core
from . import exposition
from . import process_collector
from . import platform_collector

# Names exported for 'from prometheus_client import *'.
__all__ = ['Counter', 'Gauge', 'Summary', 'Histogram']
# http://stackoverflow.com/questions/19913653/no-unicode-in-all-for-a-packages-init
# NOTE(review): on Python 3 this rewrites __all__ as a list of *bytes*,
# which would break star-imports there — presumably only needed/exercised
# under Python 2 with unicode_literals elsewhere; confirm.
__all__ = [n.encode('ascii') for n in __all__]

# Re-export the submodules' public API at package level so callers can
# simply do 'from prometheus_client import Counter, start_http_server, ...'.
CollectorRegistry = core.CollectorRegistry
REGISTRY = core.REGISTRY
Metric = core.Metric
Counter = core.Counter
Gauge = core.Gauge
Summary = core.Summary
Histogram = core.Histogram

CONTENT_TYPE_LATEST = exposition.CONTENT_TYPE_LATEST
generate_latest = exposition.generate_latest
MetricsHandler = exposition.MetricsHandler
make_wsgi_app = exposition.make_wsgi_app
start_http_server = exposition.start_http_server
start_wsgi_server = exposition.start_wsgi_server
write_to_textfile = exposition.write_to_textfile
push_to_gateway = exposition.push_to_gateway
pushadd_to_gateway = exposition.pushadd_to_gateway
delete_from_gateway = exposition.delete_from_gateway
instance_ip_grouping_key = exposition.instance_ip_grouping_key

# Importing process_collector/platform_collector above also registered
# their default collector instances with REGISTRY; expose them here too.
ProcessCollector = process_collector.ProcessCollector
PROCESS_COLLECTOR = process_collector.PROCESS_COLLECTOR
PlatformCollector = platform_collector.PlatformCollector
PLATFORM_COLLECTOR = platform_collector.PLATFORM_COLLECTOR

if __name__ == '__main__':
    # Ad-hoc demo: create one metric of each type and serve them on :8000.
    c = Counter('cc', 'A counter')
    c.inc()

    g = Gauge('gg', 'A gauge')
    g.set(17)

    s = Summary('ss', 'A summary', ['a', 'b'])
    s.labels('c', 'd').observe(17)

    h = Histogram('hh', 'A histogram')
    h.observe(.6)

    start_http_server(8000)
    import time
    while True:
        time.sleep(1)
#!/usr/bin/python
from __future__ import unicode_literals
import logging
import re
import socket
import time
import threading
from timeit import default_timer
from .. import core
# Roughly, have to keep to what works as a file name.
# We also remove periods, so labels can be distinguished.
_INVALID_GRAPHITE_CHARS = re.compile(r"[^a-zA-Z0-9_-]")
def _sanitize(s):
return _INVALID_GRAPHITE_CHARS.sub('_', s)
class _RegularPush(threading.Thread):
    """Background thread that pushes to Graphite every `interval` seconds."""

    def __init__(self, pusher, interval, prefix):
        # pusher: object exposing push(prefix=...) — a GraphiteBridge.
        # interval: seconds between pushes.
        # prefix: metric-name prefix forwarded to each push() call.
        super(_RegularPush, self).__init__()
        self._pusher = pusher
        self._interval = interval
        self._prefix = prefix

    def run(self):
        # Align pushes to a fixed schedule (wait_until advances in whole
        # multiples of the interval) rather than sleeping a fixed amount
        # after each push, so push duration doesn't cause drift.
        wait_until = default_timer()
        while True:
            while True:
                now = default_timer()
                if now >= wait_until:
                    # May need to skip some pushes.
                    while wait_until < now:
                        wait_until += self._interval
                    break
                # time.sleep can return early.
                time.sleep(wait_until - now)
            try:
                self._pusher.push(prefix=self._prefix)
            except IOError:
                # Best-effort: log and keep the loop alive for the next tick.
                logging.exception("Push failed")
class GraphiteBridge(object):
    """Sends the samples of a registry to a Graphite/Carbon server.

    Metric and label names are sanitized to Graphite-safe characters;
    labels are flattened into the dotted path as 'name.k.v...'.
    """

    def __init__(self, address, registry=core.REGISTRY, timeout_seconds=30, _timer=time.time):
        self._address = address
        self._registry = registry
        self._timeout = timeout_seconds
        self._timer = _timer

    def push(self, prefix=''):
        """Collect the registry once and send everything in one connection."""
        now = int(self._timer())
        prefixstr = prefix + '.' if prefix else ''
        lines = []
        for metric in self._registry.collect():
            for name, labels, value in metric.samples:
                labelstr = ''
                if labels:
                    # Sort for a deterministic path, sanitize each part.
                    pairs = ['{0}.{1}'.format(_sanitize(k), _sanitize(v))
                             for k, v in sorted(labels.items())]
                    labelstr = '.' + '.'.join(pairs)
                lines.append('{0}{1}{2} {3} {4}\n'.format(
                    prefixstr, _sanitize(name), labelstr, float(value), now))
        conn = socket.create_connection(self._address, self._timeout)
        conn.sendall(''.join(lines).encode('ascii'))
        conn.close()

    def start(self, interval=60.0, prefix=''):
        """Start pushing every `interval` seconds on a daemon thread."""
        pusher = _RegularPush(self, interval, prefix)
        pusher.daemon = True
        pusher.start()
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
#!/usr/bin/python
from __future__ import unicode_literals
import glob
import json
import os
import shelve
from . import core
class MultiProcessCollector(object):
    """Collector for files for multi-process mode."""

    def __init__(self, registry, path=os.environ.get('prometheus_multiproc_dir')):
        # path: directory holding the per-process *.db mmap files.
        self._path = path
        if registry:
            registry.register(self)

    def collect(self):
        """Merge all per-process files into one set of Metric objects."""
        metrics = {}
        # Pass 1: read every sample from every process's file.
        # File names look like '<type>[_<mode>][_<pid>].db'.
        for f in glob.glob(os.path.join(self._path, '*.db')):
            parts = os.path.basename(f).split('_')
            typ = parts[0]
            d = core._MmapedDict(f)
            for key, value in d.read_all_values():
                metric_name, name, labelnames, labelvalues = json.loads(key)
                metrics.setdefault(metric_name, core.Metric(metric_name, 'Multiprocess metric', typ))
                metric = metrics[metric_name]
                if typ == 'gauge':
                    # Keep the originating pid as a label so the
                    # per-mode aggregation below can work with it.
                    pid = parts[2][:-3]
                    metric._multiprocess_mode = parts[1]
                    metric.add_sample(name, tuple(zip(labelnames, labelvalues)) + (('pid', pid), ), value)
                else:
                    # The duplicates and labels are fixed in the next for.
                    metric.add_sample(name, tuple(zip(labelnames, labelvalues)), value)
            d.close()

        # Pass 2: aggregate the per-process samples per metric.
        for metric in metrics.values():
            samples = {}
            buckets = {}
            latest_ts = None
            for name, labels, value in metric.samples:
                if value[1] is not None:
                    # BUG FIX: max(None, ts) raises TypeError on Python 3;
                    # seed with the first timestamp seen instead.
                    latest_ts = value[1] if latest_ts is None else max(latest_ts, value[1])
                if metric.type == 'gauge':
                    without_pid = tuple([l for l in labels if l[0] != 'pid'])
                    if metric._multiprocess_mode == 'min':
                        samples.setdefault((name, without_pid), value)
                        if samples[(name, without_pid)][0] > value[0]:
                            samples[(name, without_pid)] = value
                    elif metric._multiprocess_mode == 'max':
                        samples.setdefault((name, without_pid), value)
                        if samples[(name, without_pid)][0] < value[0]:
                            samples[(name, without_pid)] = value
                    elif metric._multiprocess_mode == 'livesum':
                        samples.setdefault((name, without_pid), [0.0, None])
                        samples[(name, without_pid)][0] += value[0]
                        samples[(name, without_pid)][1] = latest_ts
                    else:  # all/liveall
                        samples[(name, labels)] = value
                elif metric.type == 'histogram':
                    bucket = [float(l[1]) for l in labels if l[0] == 'le']
                    if bucket:
                        # _bucket: accumulate per upper bound, summed later.
                        without_le = tuple([l for l in labels if l[0] != 'le'])
                        buckets.setdefault(without_le, {})
                        buckets[without_le].setdefault(bucket[0], [0.0, None])
                        buckets[without_le][bucket[0]][0] += value[0]
                        buckets[without_le][bucket[0]][1] = latest_ts
                    else:
                        # _sum/_count
                        samples.setdefault((name, labels), [0.0, None])
                        samples[(name, labels)][0] += value[0]
                        samples[(name, labels)][1] = latest_ts
                else:
                    # Counter and Summary.
                    samples.setdefault((name, labels), [0.0, None])
                    samples[(name, labels)][0] += value[0]
                    samples[(name, labels)][1] = value[1]

            # Accumulate bucket values (cumulative histogram semantics).
            if metric.type == 'histogram':
                for labels, values in buckets.items():
                    latest_ts = None
                    acc = 0.0
                    for bucket, value in sorted(values.items()):
                        acc += value[0]
                        if value[1] is not None:
                            # BUG FIX: same Python 3 max(None, ts) issue.
                            latest_ts = value[1] if latest_ts is None else max(latest_ts, value[1])
                        samples[(metric.name + '_bucket', labels + (('le', core._floatToGoString(bucket)), ))] = \
                            (acc, value[1])
                    samples[(metric.name + '_count', labels)] = (acc, latest_ts)

            # Convert to correct sample format.
            metric.samples = [(name, dict(labels), tuple(value)) for (name, labels), value in samples.items()]
        return metrics.values()
def mark_process_dead(pid, path=os.environ.get('prometheus_multiproc_dir')):
    """Do bookkeeping for when one process dies in a multi-process setup.

    Only the live-gauge files of the dead process are removed; every other
    metric file is kept so the dead process's contributions survive.
    """
    doomed = glob.glob(os.path.join(path, 'gauge_livesum_{0}.db'.format(pid)))
    doomed += glob.glob(os.path.join(path, 'gauge_liveall_{0}.db'.format(pid)))
    for victim in doomed:
        os.remove(victim)
#!/usr/bin/python
from __future__ import unicode_literals
try:
import StringIO
except ImportError:
# Python 3
import io as StringIO
from . import core
def text_string_to_metric_families(text):
    """Parse Prometheus text format from a unicode string.

    See text_fd_to_metric_families.
    """
    fd = StringIO.StringIO(text)
    for family in text_fd_to_metric_families(fd):
        yield family
def _unescape_help(text):
result = []
slash = False
for char in text:
if slash:
if char == '\\':
result.append('\\')
elif char == 'n':
result.append('\n')
else:
result.append('\\' + char)
slash = False
else:
if char == '\\':
slash = True
else:
result.append(char)
if slash:
result.append('\\')
return ''.join(result)
def _parse_sample(text):
    """Parse one sample line of the text exposition format.

    Returns (name, labels, (value, timestamp)) where labels is a dict and
    timestamp is an int or None. Implemented as a character-by-character
    state machine so escapes inside quoted label values are handled.
    Raises ValueError on malformed label syntax.
    """
    name = []
    labelname = []
    labelvalue = []
    value = []
    timestamp = []
    labels = {}
    state = 'name'
    for char in text:
        if state == 'name':
            # Metric name runs until '{' (labels follow) or whitespace.
            if char == '{':
                state = 'startoflabelname'
            elif char == ' ' or char == '\t':
                state = 'endofname'
            else:
                name.append(char)
        elif state == 'endofname':
            # Whitespace after the name; a '{' may still start labels,
            # anything else begins the value.
            if char == ' ' or char == '\t':
                pass
            elif char == '{':
                state = 'startoflabelname'
            else:
                value.append(char)
                state = 'value'
        elif state == 'startoflabelname':
            # Just after '{' or ','; '}' closes an (empty) label set.
            if char == ' ' or char == '\t' or char == ',':
                pass
            elif char == '}':
                state = 'endoflabels'
            else:
                state = 'labelname'
                labelname.append(char)
        elif state == 'labelname':
            if char == '=':
                state = 'labelvaluequote'
            elif char == ' ' or char == '\t':
                # Whitespace is allowed between the label name and '='.
                state = 'labelvalueequals'
            else:
                labelname.append(char)
        elif state == 'labelvalueequals':
            if char == '=':
                state = 'labelvaluequote'
            elif char == ' ' or char == '\t':
                pass
            else:
                raise ValueError("Invalid line: " + text)
        elif state == 'labelvaluequote':
            # Only whitespace may appear before the opening quote.
            if char == '"':
                state = 'labelvalue'
            elif char == ' ' or char == '\t':
                pass
            else:
                raise ValueError("Invalid line: " + text)
        elif state == 'labelvalue':
            if char == '\\':
                state = 'labelvalueslash'
            elif char == '"':
                # Closing quote: commit the finished name/value pair.
                labels[''.join(labelname)] = ''.join(labelvalue)
                labelname = []
                labelvalue = []
                state = 'nextlabel'
            else:
                labelvalue.append(char)
        elif state == 'labelvalueslash':
            # Escapes inside label values: \\, \n and \" decode; any other
            # pair keeps its backslash verbatim.
            state = 'labelvalue'
            if char == '\\':
                labelvalue.append('\\')
            elif char == 'n':
                labelvalue.append('\n')
            elif char == '"':
                labelvalue.append('"')
            else:
                labelvalue.append('\\' + char)
        elif state == 'nextlabel':
            if char == ',':
                state = 'startoflabelname'
            elif char == '}':
                state = 'endoflabels'
            elif char == ' ' or char == '\t':
                pass
            else:
                raise ValueError("Invalid line: " + text)
        elif state == 'endoflabels':
            # After '}': skip whitespace, then the value begins.
            if char == ' ' or char == '\t':
                pass
            else:
                value.append(char)
                state = 'value'
        elif state == 'value':
            if char == ' ' or char == '\t':
                state = 'endofvalue'
            else:
                value.append(char)
        elif state == 'endofvalue':
            if char == ' ' or char == '\t':
                pass
            else:
                state = 'timestamp'
                timestamp.append(char)
        elif state == 'timestamp':
            if char == ' ' or char == '\t':
                # Anything after the timestamp is ignored.
                break
            else:
                timestamp.append(char)
    if len(timestamp) == 0:
        timestamp = None
    else:
        timestamp = int(''.join(timestamp))
    return (''.join(name), labels, (float(''.join(value)), timestamp))
def text_fd_to_metric_families(fd):
    """Parse Prometheus text format from a file descriptor.

    This is a laxer parser than the main Go parser,
    so successful parsing does not imply that the parsed
    text meets the specification.

    Yields core.Metric's.
    """
    # Accumulated state of the metric family currently being read;
    # a family is yielded whenever a new one starts, and at EOF.
    name = ''
    documentation = ''
    typ = 'untyped'
    samples = []
    allowed_names = []

    def build_metric(name, documentation, typ, samples):
        # Bundle the accumulated state into a Metric object.
        metric = core.Metric(name, documentation, typ)
        metric.samples = samples
        return metric

    for line in fd:
        line = line.strip()

        if line.startswith('#'):
            parts = line.split(None, 3)
            if len(parts) < 2:
                # A bare '#' with nothing after it; ignore.
                continue
            if parts[1] == 'HELP':
                if parts[2] != name:
                    if name != '':
                        # HELP for a different metric starts a new family.
                        yield build_metric(name, documentation, typ, samples)
                    # New metric
                    name = parts[2]
                    typ = 'untyped'
                    samples = []
                    allowed_names = [parts[2]]
                if len(parts) == 4:
                    documentation = _unescape_help(parts[3])
                else:
                    documentation = ''
            elif parts[1] == 'TYPE':
                if parts[2] != name:
                    if name != '':
                        yield build_metric(name, documentation, typ, samples)
                    # New metric
                    name = parts[2]
                    documentation = ''
                    samples = []
                typ = parts[3]
                # Which sample names may belong to this family, by type
                # (e.g. a histogram owns its _count/_sum/_bucket series).
                allowed_names = {
                    'counter': [''],
                    'gauge': [''],
                    'summary': ['_count', '_sum', ''],
                    'histogram': ['_count', '_sum', '_bucket'],
                }.get(typ, [''])
                allowed_names = [name + n for n in allowed_names]
            else:
                # Ignore other comment tokens
                pass
        elif line == '':
            # Ignore blank lines
            pass
        else:
            sample = _parse_sample(line)
            if sample[0] not in allowed_names:
                if name != '':
                    yield build_metric(name, documentation, typ, samples)
                # New metric, yield immediately as untyped singleton
                name = ''
                documentation = ''
                typ = 'untyped'
                samples = []
                allowed_names = []
                yield build_metric(sample[0], documentation, typ, [sample])
            else:
                samples.append(sample)

    if name != '':
        # Flush the last family at EOF.
        yield build_metric(name, documentation, typ, samples)
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import platform as pf
from . import core
class PlatformCollector(object):
    """Exports interpreter/platform details as a single python_info gauge."""

    def __init__(self, registry=core.REGISTRY, platform=None):
        # `platform` is injectable for tests; default to the stdlib module.
        self._platform = pf if platform is None else platform
        info = self._info()
        if self._platform.system() == "Java":
            # Jython: enrich with JVM details.
            info.update(self._java())
        self._metrics = [
            self._add_metric("python_info", "Python platform information", info)
        ]
        if registry:
            registry.register(self)

    def collect(self):
        # Metrics are computed once at construction; just hand them back.
        return self._metrics

    @staticmethod
    def _add_metric(name, documentation, data):
        # Turn a dict into a constant gauge whose labels carry the data.
        label_names = data.keys()
        label_values = [data[key] for key in label_names]
        gauge = core.GaugeMetricFamily(name, documentation, labels=label_names)
        gauge.add_metric(label_values, 1)
        return gauge

    def _info(self):
        major, minor, patchlevel = self._platform.python_version_tuple()
        return {
            "version": self._platform.python_version(),
            "implementation": self._platform.python_implementation(),
            "major": major,
            "minor": minor,
            "patchlevel": patchlevel
        }

    def _java(self):
        java_version, _, vminfo, osinfo = self._platform.java_ver()
        vm_name, vm_release, vm_vendor = vminfo
        return {
            "jvm_version": java_version,
            "jvm_release": vm_release,
            "jvm_vendor": vm_vendor,
            "jvm_name": vm_name
        }
# Instantiated at import time, which registers it with the default
# registry (PlatformCollector's registry argument defaults to core.REGISTRY).
PLATFORM_COLLECTOR = PlatformCollector()
"""PlatformCollector in default Registry REGISTRY"""
#!/usr/bin/python
from __future__ import unicode_literals
import os
from . import core
try:
import resource
_PAGESIZE = resource.getpagesize()
except ImportError:
# Not Unix
_PAGESIZE = 4096
class ProcessCollector(object):
    """Collector for Standard Exports such as cpu and memory."""

    def __init__(self, namespace='', pid=lambda: 'self', proc='/proc', registry=core.REGISTRY):
        # namespace: optional prefix prepended as '<namespace>_process_'.
        # pid: callable returning the pid (or 'self') to inspect.
        # proc: procfs mount point, injectable for tests.
        self._namespace = namespace
        self._pid = pid
        self._proc = proc
        if namespace:
            self._prefix = namespace + '_process_'
        else:
            self._prefix = 'process_'
        # Clock ticks per second; needed to convert stat's tick counters.
        self._ticks = 100.0
        try:
            self._ticks = os.sysconf('SC_CLK_TCK')
        except (ValueError, TypeError, AttributeError):
            pass

        # This is used to test if we can access /proc.
        self._btime = 0
        try:
            self._btime = self._boot_time()
        except IOError:
            pass
        if registry:
            registry.register(self)

    def _boot_time(self):
        # Kernel boot time (seconds since the epoch) from /proc/stat's
        # 'btime' line; returns None if the line is absent.
        with open(os.path.join(self._proc, 'stat')) as stat:
            for line in stat:
                if line.startswith('btime '):
                    return float(line.split()[1])

    def collect(self):
        """Read /proc/<pid>/{stat,limits,fd} and emit process metrics.

        Each file is best-effort: an unreadable file just drops its
        metrics rather than failing the whole scrape.
        """
        if not self._btime:
            # /proc was unreadable at construction time; nothing to export.
            return []

        pid = os.path.join(self._proc, str(self._pid()).strip())

        result = []
        try:
            with open(os.path.join(pid, 'stat')) as stat:
                # Split after the final ')' so spaces or parens in the
                # process name (field 2) cannot shift the field indices.
                parts = (stat.read().split(')')[-1].split())
            # Field offsets below follow proc(5), counted from field 3.
            vmem = core.GaugeMetricFamily(self._prefix + 'virtual_memory_bytes',
                'Virtual memory size in bytes.', value=float(parts[20]))
            rss = core.GaugeMetricFamily(self._prefix + 'resident_memory_bytes', 'Resident memory size in bytes.',
                value=float(parts[21]) * _PAGESIZE)
            start_time_secs = float(parts[19]) / self._ticks
            start_time = core.GaugeMetricFamily(self._prefix + 'start_time_seconds',
                'Start time of the process since unix epoch in seconds.',
                value=start_time_secs + self._btime)
            utime = float(parts[11]) / self._ticks
            stime = float(parts[12]) / self._ticks
            cpu = core.CounterMetricFamily(self._prefix + 'cpu_seconds_total',
                'Total user and system CPU time spent in seconds.',
                value=utime + stime)
            result.extend([vmem, rss, start_time, cpu])
        except IOError:
            pass

        try:
            # BUG FIX: initialise max_fds so a limits file without a
            # 'Max open file' line cannot raise UnboundLocalError below
            # (UnboundLocalError is not caught by the IOError handler).
            max_fds = None
            with open(os.path.join(pid, 'limits')) as limits:
                for line in limits:
                    if line.startswith('Max open file'):
                        max_fds = core.GaugeMetricFamily(self._prefix + 'max_fds',
                            'Maximum number of open file descriptors.',
                            value=float(line.split()[3]))
                        break
            open_fds = core.GaugeMetricFamily(self._prefix + 'open_fds',
                'Number of open file descriptors.',
                len(os.listdir(os.path.join(pid, 'fd'))))
            if max_fds is not None:
                result.extend([open_fds, max_fds])
            else:
                result.append(open_fds)
        except IOError:
            pass

        return result
# Instantiated at import time, which registers it with the default
# registry (ProcessCollector's registry argument defaults to core.REGISTRY).
PROCESS_COLLECTOR = ProcessCollector()
"""Default ProcessCollector in default Registry REGISTRY."""
from ._exposition import MetricsResource

# Public API of the prometheus_client.twisted sub-package.
__all__ = ['MetricsResource']
from __future__ import absolute_import, unicode_literals
from .. import REGISTRY, generate_latest, CONTENT_TYPE_LATEST
from twisted.web.resource import Resource
class MetricsResource(Resource):
    """
    Twisted ``Resource`` that serves prometheus metrics.
    """
    isLeaf = True

    def __init__(self, registry=REGISTRY):
        # BUG FIX: initialise the base Resource (it sets up the children
        # mapping and related state); skipping it breaks putChild/getChild
        # when this resource participates in a larger resource tree.
        Resource.__init__(self)
        self.registry = registry

    def render_GET(self, request):
        # Expose the registry in the Prometheus text format with the
        # matching content-type header.
        request.setHeader(b'Content-Type', CONTENT_TYPE_LATEST.encode('ascii'))
        return generate_latest(self.registry)
import os
from setuptools import setup
# Package metadata; values are unchanged, only PEP 8 keyword spacing applied.
setup(
    name="prometheus_client",
    version="0.0.19",
    author="Brian Brazil",
    author_email="brian.brazil@robustperception.io",
    description="Python client for the Prometheus monitoring system.",
    long_description="See https://github.com/prometheus/client_python/blob/master/README.md for documentation.",
    license="Apache Software License 2.0",
    keywords="prometheus monitoring instrumentation client",
    url="https://github.com/prometheus/client_python",
    packages=['prometheus_client', 'prometheus_client.bridge', 'prometheus_client.twisted'],
    # The twisted resource is optional: install as prometheus_client[twisted].
    extras_require={
        'twisted': ['twisted'],
    },
    test_suite="tests",
    classifiers=[
        "Development Status :: 4 - Beta",
        "Intended Audience :: Developers",
        "Intended Audience :: Information Technology",
        "Intended Audience :: System Administrators",
        "Programming Language :: Python",
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 2.6",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.4",
        "Programming Language :: Python :: 3.5",
        "Programming Language :: Python :: Implementation :: CPython",
        "Programming Language :: Python :: Implementation :: PyPy",
        "Topic :: System :: Monitoring",
        "License :: OSI Approved :: Apache Software License",
    ],
)
Limit Soft Limit Hard Limit Units
Max cpu time unlimited unlimited seconds
Max file size unlimited unlimited bytes
Max data size unlimited unlimited bytes
Max stack size 8388608 unlimited bytes
Max core file size 0 unlimited bytes
Max resident set unlimited unlimited bytes
Max processes 62898 62898 processes
Max open files 2048 4096 files
Max locked memory 65536 65536 bytes
Max address space unlimited unlimited bytes
Max file locks unlimited unlimited locks
Max pending signals 62898 62898 signals
Max msgqueue size 819200 819200 bytes
Max nice priority 0 0
26231 (vim) R 5392 7446 5392 34835 7446 4218880 32533 309516 26 82 1677 44 158 99 20 0 1 0 82375 56274944 1981 18446744073709551615 4194304 6294284 140736914091744 140736914087944 139965136429984 0 0 12288 1870679807 0 0 0 17 0 0 0 31 0 0 8391624 8481048 16420864 140736914093252 140736914093279 140736914093279 140736914096107 0
1020 ((a b ) ( c d) ) R 28378 1020 28378 34842 1020 4218880 286 0 0 0 0 0 0 0 20 0 1 0 10839175 10395648 155 18446744073709551615 4194304 4238788 140736466511168 140736466511168 140609271124624 0 0 0 0 0 0 0 17 5 0 0 0 0 0 6336016 6337300 25579520 140736466515030 140736466515061 140736466515061 140736466518002 0
cpu 301854 612 111922 8979004 3552 2 3944 0 0 0
cpu0 44490 19 21045 1087069 220 1 3410 0 0 0
cpu1 47869 23 16474 1110787 591 0 46 0 0 0
cpu2 46504 36 15916 1112321 441 0 326 0 0 0
cpu3 47054 102 15683 1113230 533 0 60 0 0 0
cpu4 28413 25 10776 1140321 217 0 8 0 0 0
cpu5 29271 101 11586 1136270 672 0 30 0 0 0
cpu6 29152 36 10276 1139721 319 0 29 0 0 0
cpu7 29098 268 10164 1139282 555 0 31 0 0 0
intr 8885917 17 0 0 0 0 0 0 0 1 79281 0 0 0 0 0 0 0 231237 0 0 0 0 250586 103 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 223424 190745 13 906 1283803 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0intr 8885917 17 0 0 0 0 0 0 0 1 79281 0 0 0 0 00 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
ctxt 38014093
btime 1418183276
processes 26442
procs_running 2
procs_blocked 0
softirq 5057579 250191 1481983 1647 211099 186066 0 1783454 622196 12499 508444
This diff is collapsed.
from __future__ import unicode_literals
import sys
import threading
if sys.version_info < (2, 7):
# We need the skip decorators from unittest2 on Python 2.6.
import unittest2 as unittest
else:
import unittest
from prometheus_client import Gauge, Counter, Summary, Histogram, Metric
from prometheus_client import CollectorRegistry, generate_latest
from prometheus_client import push_to_gateway, pushadd_to_gateway, delete_from_gateway
from prometheus_client import CONTENT_TYPE_LATEST, instance_ip_grouping_key
from prometheus_client.exposition import default_handler, basic_auth_handler
try:
from BaseHTTPServer import BaseHTTPRequestHandler
from BaseHTTPServer import HTTPServer
except ImportError:
# Python 3
from http.server import BaseHTTPRequestHandler
from http.server import HTTPServer
class TestGenerateText(unittest.TestCase):
    """End-to-end checks of generate_latest()'s text exposition output."""

    def setUp(self):
        # Fresh registry per test so metrics never leak between cases.
        self.registry = CollectorRegistry()

    def test_counter(self):
        c = Counter('cc', 'A counter', registry=self.registry)
        c.inc()
        self.assertEqual(b'# HELP cc A counter\n# TYPE cc counter\ncc 1.0\n', generate_latest(self.registry))

    def test_gauge(self):
        g = Gauge('gg', 'A gauge', registry=self.registry)
        g.set(17)
        self.assertEqual(b'# HELP gg A gauge\n# TYPE gg gauge\ngg 17.0\n', generate_latest(self.registry))

    def test_summary(self):
        s = Summary('ss', 'A summary', ['a', 'b'], registry=self.registry)
        s.labels('c', 'd').observe(17)
        self.assertEqual(b'# HELP ss A summary\n# TYPE ss summary\nss_count{a="c",b="d"} 1.0\nss_sum{a="c",b="d"} 17.0\n', generate_latest(self.registry))

    @unittest.skipIf(sys.version_info < (2, 7), "Test requires Python 2.7+.")
    def test_histogram(self):
        s = Histogram('hh', 'A histogram', registry=self.registry)
        s.observe(0.05)
        self.assertEqual(b'''# HELP hh A histogram
# TYPE hh histogram
hh_bucket{le="0.005"} 0.0
hh_bucket{le="0.01"} 0.0
hh_bucket{le="0.025"} 0.0
hh_bucket{le="0.05"} 1.0
hh_bucket{le="0.075"} 1.0
hh_bucket{le="0.1"} 1.0
hh_bucket{le="0.25"} 1.0
hh_bucket{le="0.5"} 1.0
hh_bucket{le="0.75"} 1.0
hh_bucket{le="1.0"} 1.0
hh_bucket{le="2.5"} 1.0
hh_bucket{le="5.0"} 1.0
hh_bucket{le="7.5"} 1.0
hh_bucket{le="10.0"} 1.0
hh_bucket{le="+Inf"} 1.0
hh_count 1.0
hh_sum 0.05
''', generate_latest(self.registry))

    def test_unicode(self):
        # U+4500 encodes to the UTF-8 bytes \xe4\x94\x80 in the output.
        c = Counter('cc', '\u4500', ['l'], registry=self.registry)
        c.labels('\u4500').inc()
        self.assertEqual(b'# HELP cc \xe4\x94\x80\n# TYPE cc counter\ncc{l="\xe4\x94\x80"} 1.0\n', generate_latest(self.registry))

    def test_escaping(self):
        # Newlines, backslashes and quotes must be escaped in HELP/labels.
        c = Counter('cc', 'A\ncount\\er', ['a'], registry=self.registry)
        c.labels('\\x\n"').inc(1)
        self.assertEqual(b'# HELP cc A\\ncount\\\\er\n# TYPE cc counter\ncc{a="\\\\x\\n\\""} 1.0\n', generate_latest(self.registry))

    def test_nonnumber(self):
        # Sample values only need to be float()-able, not actual numbers.
        class MyNumber():
            def __repr__(self):
                return "MyNumber(123)"

            def __float__(self):
                return 123.0

        class MyCollector():
            def collect(self):
                metric = Metric("nonnumber", "Non number", 'untyped')
                metric.add_sample("nonnumber", {}, MyNumber())
                yield metric

        self.registry.register(MyCollector())
        self.assertEqual(b'# HELP nonnumber Non number\n# TYPE nonnumber untyped\nnonnumber 123.0\n', generate_latest(self.registry))
class TestPushGateway(unittest.TestCase):
    """Tests push/pushadd/delete against an in-process one-shot HTTP server."""

    def setUp(self):
        self.registry = CollectorRegistry()
        self.counter = Gauge('g', 'help', registry=self.registry)
        # Captured (handler, body) pairs, shared with TestHandler via closure.
        self.requests = requests = []

        class TestHandler(BaseHTTPRequestHandler):
            def do_PUT(self):
                # 'Zm9vOmJhcg==' is base64('foo:bar'); jobs whose name
                # contains 'with_basic_auth' require those credentials.
                if 'with_basic_auth' in self.requestline and self.headers['authorization'] != 'Basic Zm9vOmJhcg==':
                    self.send_response(401)
                else:
                    self.send_response(201)
                length = int(self.headers['content-length'])
                requests.append((self, self.rfile.read(length)))
                self.end_headers()

            do_POST = do_PUT
            do_DELETE = do_PUT

        # Port 0 makes the OS pick a free port.
        httpd = HTTPServer(('localhost', 0), TestHandler)
        self.address = 'http://localhost:{0}'.format(httpd.server_address[1])

        class TestServer(threading.Thread):
            def run(self):
                # Serve exactly one request, then exit the thread.
                httpd.handle_request()

        self.server = TestServer()
        self.server.daemon = True
        self.server.start()

    def test_push(self):
        push_to_gateway(self.address, "my_job", self.registry)
        self.assertEqual(self.requests[0][0].command, 'PUT')
        self.assertEqual(self.requests[0][0].path, '/metrics/job/my_job')
        self.assertEqual(self.requests[0][0].headers.get('content-type'), CONTENT_TYPE_LATEST)
        self.assertEqual(self.requests[0][1], b'# HELP g help\n# TYPE g gauge\ng 0.0\n')

    def test_push_with_groupingkey(self):
        push_to_gateway(self.address, "my_job", self.registry, {'a': 9})
        self.assertEqual(self.requests[0][0].command, 'PUT')
        self.assertEqual(self.requests[0][0].path, '/metrics/job/my_job/a/9')
        self.assertEqual(self.requests[0][0].headers.get('content-type'), CONTENT_TYPE_LATEST)
        self.assertEqual(self.requests[0][1], b'# HELP g help\n# TYPE g gauge\ng 0.0\n')

    def test_push_with_complex_groupingkey(self):
        # Grouping-key values must be URL-quoted in the path ('/' -> %2F).
        push_to_gateway(self.address, "my_job", self.registry, {'a': 9, 'b': 'a/ z'})
        self.assertEqual(self.requests[0][0].command, 'PUT')
        self.assertEqual(self.requests[0][0].path, '/metrics/job/my_job/a/9/b/a%2F+z')
        self.assertEqual(self.requests[0][0].headers.get('content-type'), CONTENT_TYPE_LATEST)
        self.assertEqual(self.requests[0][1], b'# HELP g help\n# TYPE g gauge\ng 0.0\n')

    def test_pushadd(self):
        pushadd_to_gateway(self.address, "my_job", self.registry)
        self.assertEqual(self.requests[0][0].command, 'POST')
        self.assertEqual(self.requests[0][0].path, '/metrics/job/my_job')
        self.assertEqual(self.requests[0][0].headers.get('content-type'), CONTENT_TYPE_LATEST)
        self.assertEqual(self.requests[0][1], b'# HELP g help\n# TYPE g gauge\ng 0.0\n')

    def test_pushadd_with_groupingkey(self):
        pushadd_to_gateway(self.address, "my_job", self.registry, {'a': 9})
        self.assertEqual(self.requests[0][0].command, 'POST')
        self.assertEqual(self.requests[0][0].path, '/metrics/job/my_job/a/9')
        self.assertEqual(self.requests[0][0].headers.get('content-type'), CONTENT_TYPE_LATEST)
        self.assertEqual(self.requests[0][1], b'# HELP g help\n# TYPE g gauge\ng 0.0\n')

    def test_delete(self):
        # Deletes carry no body.
        delete_from_gateway(self.address, "my_job")
        self.assertEqual(self.requests[0][0].command, 'DELETE')
        self.assertEqual(self.requests[0][0].path, '/metrics/job/my_job')
        self.assertEqual(self.requests[0][0].headers.get('content-type'), CONTENT_TYPE_LATEST)
        self.assertEqual(self.requests[0][1], b'')

    def test_delete_with_groupingkey(self):
        delete_from_gateway(self.address, "my_job", {'a': 9})
        self.assertEqual(self.requests[0][0].command, 'DELETE')
        self.assertEqual(self.requests[0][0].path, '/metrics/job/my_job/a/9')
        self.assertEqual(self.requests[0][0].headers.get('content-type'), CONTENT_TYPE_LATEST)
        self.assertEqual(self.requests[0][1], b'')

    def test_push_with_handler(self):
        # Custom handlers can decorate the request before delegating.
        def my_test_handler(url, method, timeout, headers, data):
            headers.append(['X-Test-Header', 'foobar'])
            return default_handler(url, method, timeout, headers, data)

        push_to_gateway(self.address, "my_job", self.registry, handler=my_test_handler)
        self.assertEqual(self.requests[0][0].command, 'PUT')
        self.assertEqual(self.requests[0][0].path, '/metrics/job/my_job')
        self.assertEqual(self.requests[0][0].headers.get('content-type'), CONTENT_TYPE_LATEST)
        self.assertEqual(self.requests[0][0].headers.get('x-test-header'), 'foobar')
        self.assertEqual(self.requests[0][1], b'# HELP g help\n# TYPE g gauge\ng 0.0\n')

    def test_push_with_basic_auth_handler(self):
        # The credentials match what TestHandler.do_PUT expects above.
        def my_auth_handler(url, method, timeout, headers, data):
            return basic_auth_handler(url, method, timeout, headers, data, "foo", "bar")

        push_to_gateway(self.address, "my_job_with_basic_auth", self.registry, handler=my_auth_handler)
        self.assertEqual(self.requests[0][0].command, 'PUT')
        self.assertEqual(self.requests[0][0].path, '/metrics/job/my_job_with_basic_auth')
        self.assertEqual(self.requests[0][0].headers.get('content-type'), CONTENT_TYPE_LATEST)
        self.assertEqual(self.requests[0][1], b'# HELP g help\n# TYPE g gauge\ng 0.0\n')

    @unittest.skipIf(
        sys.platform == "darwin",
        "instance_ip_grouping_key() does not work on macOS."
    )
    def test_instance_ip_grouping_key(self):
        self.assertTrue('' != instance_ip_grouping_key()['instance'])
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
import unittest
import threading
try:
import SocketServer
except ImportError:
import socketserver as SocketServer
from prometheus_client import Counter, CollectorRegistry
from prometheus_client.bridge.graphite import GraphiteBridge
def fake_timer():
    """Deterministic stand-in for time.time so pushed timestamps are stable."""
    return 1434898897.5
class TestGraphiteBridge(unittest.TestCase):
    """Tests GraphiteBridge.push() against a one-shot local TCP server."""

    def setUp(self):
        self.registry = CollectorRegistry()

        self.data = ''

        class TCPHandler(SocketServer.BaseRequestHandler):
            def handle(s):
                # 's' is the handler instance; 'self' (from the closure)
                # is the test case capturing the received bytes.
                self.data = s.request.recv(1024)

        # Port 0 makes the OS pick a free port.
        server = SocketServer.TCPServer(('', 0), TCPHandler)

        class ServingThread(threading.Thread):
            def run(self):
                # Serve exactly one connection, then close the socket.
                server.handle_request()
                server.socket.close()

        self.t = ServingThread()
        self.t.start()

        # Explicitly use localhost as the target host, since connecting to 0.0.0.0 fails on Windows
        address = ('localhost', server.server_address[1])
        self.gb = GraphiteBridge(address, self.registry, _timer=fake_timer)

    def test_nolabels(self):
        counter = Counter('c', 'help', registry=self.registry)
        counter.inc()

        self.gb.push()
        self.t.join()

        self.assertEqual(b'c 1.0 1434898897\n', self.data)

    def test_labels(self):
        # Labels are flattened into the dotted Graphite path, sorted by key.
        labels = Counter('labels', 'help', ['a', 'b'], registry=self.registry)
        labels.labels('c', 'd').inc()

        self.gb.push()
        self.t.join()

        self.assertEqual(b'labels.a.c.b.d 1.0 1434898897\n', self.data)

    def test_prefix(self):
        labels = Counter('labels', 'help', ['a', 'b'], registry=self.registry)
        labels.labels('c', 'd').inc()

        self.gb.push(prefix = 'pre.fix')
        self.t.join()

        self.assertEqual(b'pre.fix.labels.a.c.b.d 1.0 1434898897\n', self.data)

    def test_sanitizing(self):
        # Characters Graphite can't handle ('.' and ':') become '_'.
        labels = Counter('labels', 'help', ['a'], registry=self.registry)
        labels.labels('c.:8').inc()

        self.gb.push()
        self.t.join()

        self.assertEqual(b'labels.a.c__8 1.0 1434898897\n', self.data)
from __future__ import unicode_literals
import os
import shutil
import tempfile
import time
import unittest
import prometheus_client
from prometheus_client.core import *
from prometheus_client.multiprocess import *
class TestMultiProcess(unittest.TestCase):
    """Multiprocess mode tests: each simulated process (pid 123 / 456) writes to
    its own mmapped file and MultiProcessCollector merges them at collect time."""
    def setUp(self):
        self.tempdir = tempfile.mkdtemp()
        os.environ['prometheus_multiproc_dir'] = self.tempdir
        # Pretend to be pid 123; individual tests switch to 456 to simulate a second process.
        prometheus_client.core._ValueClass = prometheus_client.core._MultiProcessValue(123)
        self.registry = CollectorRegistry()
        MultiProcessCollector(self.registry, self.tempdir)
    def tearDown(self):
        del os.environ['prometheus_multiproc_dir']
        shutil.rmtree(self.tempdir)
        # Restore the default single-process value implementation.
        prometheus_client.core._ValueClass = prometheus_client.core._MutexValue
    def test_counter_adds(self):
        # Counters from different processes are summed on collection.
        c1 = Counter('c', 'help', registry=None)
        prometheus_client.core._ValueClass = prometheus_client.core._MultiProcessValue(456)
        c2 = Counter('c', 'help', registry=None)
        self.assertEqual(0, self.registry.get_sample_value('c'))
        c1.inc(1)
        c2.inc(2)
        self.assertEqual(3, self.registry.get_sample_value('c'))
    def test_summary_adds(self):
        # Summary counts and sums are summed across processes.
        s1 = Summary('s', 'help', registry=None)
        prometheus_client.core._ValueClass = prometheus_client.core._MultiProcessValue(456)
        s2 = Summary('s', 'help', registry=None)
        self.assertEqual(0, self.registry.get_sample_value('s_count'))
        self.assertEqual(0, self.registry.get_sample_value('s_sum'))
        s1.observe(1)
        s2.observe(2)
        self.assertEqual(2, self.registry.get_sample_value('s_count'))
        self.assertEqual(3, self.registry.get_sample_value('s_sum'))
    def test_histogram_adds(self):
        # Histogram buckets, counts and sums are summed across processes.
        h1 = Histogram('h', 'help', registry=None)
        prometheus_client.core._ValueClass = prometheus_client.core._MultiProcessValue(456)
        h2 = Histogram('h', 'help', registry=None)
        self.assertEqual(0, self.registry.get_sample_value('h_count'))
        self.assertEqual(0, self.registry.get_sample_value('h_sum'))
        self.assertEqual(0, self.registry.get_sample_value('h_bucket', {'le': '5.0'}))
        h1.observe(1)
        h2.observe(2)
        self.assertEqual(2, self.registry.get_sample_value('h_count'))
        self.assertEqual(3, self.registry.get_sample_value('h_sum'))
        self.assertEqual(2, self.registry.get_sample_value('h_bucket', {'le': '5.0'}))
    def test_gauge_all(self):
        # Default 'all' mode keeps one sample per pid, even after a process dies.
        g1 = Gauge('g', 'help', registry=None)
        prometheus_client.core._ValueClass = prometheus_client.core._MultiProcessValue(456)
        g2 = Gauge('g', 'help', registry=None)
        self.assertEqual(0, self.registry.get_sample_value('g', {'pid': '123'}))
        self.assertEqual(0, self.registry.get_sample_value('g', {'pid': '456'}))
        g1.set(1)
        g2.set(2)
        mark_process_dead(123, os.environ['prometheus_multiproc_dir'])
        self.assertEqual(1, self.registry.get_sample_value('g', {'pid': '123'}))
        self.assertEqual(2, self.registry.get_sample_value('g', {'pid': '456'}))
    def test_gauge_liveall(self):
        # 'liveall' mode drops the sample for a pid once it is marked dead.
        g1 = Gauge('g', 'help', registry=None, multiprocess_mode='liveall')
        prometheus_client.core._ValueClass = prometheus_client.core._MultiProcessValue(456)
        g2 = Gauge('g', 'help', registry=None, multiprocess_mode='liveall')
        self.assertEqual(0, self.registry.get_sample_value('g', {'pid': '123'}))
        self.assertEqual(0, self.registry.get_sample_value('g', {'pid': '456'}))
        g1.set(1)
        g2.set(2)
        self.assertEqual(1, self.registry.get_sample_value('g', {'pid': '123'}))
        self.assertEqual(2, self.registry.get_sample_value('g', {'pid': '456'}))
        mark_process_dead(123, os.environ['prometheus_multiproc_dir'])
        self.assertEqual(None, self.registry.get_sample_value('g', {'pid': '123'}))
        self.assertEqual(2, self.registry.get_sample_value('g', {'pid': '456'}))
    def test_gauge_min(self):
        # 'min' mode reports the minimum value across processes.
        g1 = Gauge('g', 'help', registry=None, multiprocess_mode='min')
        prometheus_client.core._ValueClass = prometheus_client.core._MultiProcessValue(456)
        g2 = Gauge('g', 'help', registry=None, multiprocess_mode='min')
        self.assertEqual(0, self.registry.get_sample_value('g'))
        g1.set(1)
        g2.set(2)
        self.assertEqual(1, self.registry.get_sample_value('g'))
    def test_gauge_max(self):
        # 'max' mode reports the maximum value across processes.
        g1 = Gauge('g', 'help', registry=None, multiprocess_mode='max')
        prometheus_client.core._ValueClass = prometheus_client.core._MultiProcessValue(456)
        g2 = Gauge('g', 'help', registry=None, multiprocess_mode='max')
        self.assertEqual(0, self.registry.get_sample_value('g'))
        g1.set(1)
        g2.set(2)
        self.assertEqual(2, self.registry.get_sample_value('g'))
    def test_gauge_livesum(self):
        # 'livesum' sums only live processes; dead pids are excluded.
        g1 = Gauge('g', 'help', registry=None, multiprocess_mode='livesum')
        prometheus_client.core._ValueClass = prometheus_client.core._MultiProcessValue(456)
        g2 = Gauge('g', 'help', registry=None, multiprocess_mode='livesum')
        self.assertEqual(0, self.registry.get_sample_value('g'))
        g1.set(1)
        g2.set(2)
        self.assertEqual(3, self.registry.get_sample_value('g'))
        mark_process_dead(123, os.environ['prometheus_multiproc_dir'])
        self.assertEqual(2, self.registry.get_sample_value('g'))
    def test_namespace_subsystem(self):
        # namespace/subsystem prefixes must survive the multiprocess merge.
        c1 = Counter('c', 'help', registry=None, namespace='ns', subsystem='ss')
        c1.inc(1)
        self.assertEqual(1, self.registry.get_sample_value('ns_ss_c'))
class TestMmapedDict(unittest.TestCase):
    """Low-level tests for the mmap-backed key/value store behind multiprocess mode.
    Values here are (value, timestamp) pairs — NOTE(review): this fork appears to
    store sample tuples rather than bare floats; confirm against core._MmapedDict."""
    def setUp(self):
        fd, self.tempfile = tempfile.mkstemp()
        os.close(fd)
        self.d = core._MmapedDict(self.tempfile)
    def test_process_restart(self):
        # Values must survive a close/reopen cycle (i.e. a process restart).
        self.d.write_value('abc', (123.0, None))
        self.d.close()
        self.d = core._MmapedDict(self.tempfile)
        self.assertEqual((123, None), self.d.read_value('abc'))
        self.assertEqual([('abc', (123.0, None))], list(self.d.read_all_values()))
    def test_expansion(self):
        # A key as large as the initial mmap size forces the file to grow.
        key = 'a' * core._INITIAL_MMAP_SIZE
        self.d.write_value(key, (123.0, None))
        self.assertEqual([(key, (123.0, None))], list(self.d.read_all_values()))
    def test_multi_expansion(self):
        # Repeated growth must preserve earlier entries and insertion order.
        key = 'a' * core._INITIAL_MMAP_SIZE * 4
        self.d.write_value('abc', (42.0, None))
        self.d.write_value(key, (123.0, None))
        self.d.write_value('def', (17.0, None))
        self.assertEqual([('abc', (42.0, None)), (key, (123.0, None)), ('def', (17.0, None))],
            list(self.d.read_all_values()))
    def tearDown(self):
        os.unlink(self.tempfile)
from __future__ import unicode_literals
import sys
if sys.version_info < (2, 7):
# We need the skip decorators from unittest2 on Python 2.6.
import unittest2 as unittest
else:
import unittest
from prometheus_client.core import *
from prometheus_client.exposition import *
from prometheus_client.parser import *
class TestParse(unittest.TestCase):
    """Tests for the text-format parser (text_string_to_metric_families).

    Fix: test_simple_summary previously parsed its input but asserted nothing,
    so it could never fail; it now checks the parsed family like its siblings.
    """
    def test_simple_counter(self):
        families = text_string_to_metric_families("""# TYPE a counter
# HELP a help
a 1
""")
        self.assertEqual([CounterMetricFamily("a", "help", value=1)], list(families))
    def test_simple_gauge(self):
        families = text_string_to_metric_families("""# TYPE a gauge
# HELP a help
a 1
""")
        self.assertEqual([GaugeMetricFamily("a", "help", value=1)], list(families))
    def test_simple_summary(self):
        families = text_string_to_metric_families("""# TYPE a summary
# HELP a help
a_count 1
a_sum 2
""")
        # Previously missing: without this the test asserted nothing.
        self.assertEqual([SummaryMetricFamily("a", "help", count_value=1, sum_value=2)],
                         list(families))
    def test_summary_quantiles(self):
        families = text_string_to_metric_families("""# TYPE a summary
# HELP a help
a_count 1
a_sum 2
a{quantile="0.5"} 0.7
""")
        # The Python client doesn't support quantiles, but we
        # still need to be able to parse them.
        metric_family = SummaryMetricFamily("a", "help", count_value=1, sum_value=2)
        metric_family.add_sample("a", {"quantile": "0.5"}, (0.7, None))
        self.assertEqual([metric_family], list(families))
    def test_simple_histogram(self):
        families = text_string_to_metric_families("""# TYPE a histogram
# HELP a help
a_bucket{le="1"} 0
a_bucket{le="+Inf"} 3
a_count 3
a_sum 2
""")
        self.assertEqual([HistogramMetricFamily("a", "help", sum_value=2, buckets=[("1", 0.0), ("+Inf", 3.0)])], list(families))
    def test_no_metadata(self):
        # Samples without TYPE/HELP metadata become an untyped metric with empty help.
        families = text_string_to_metric_families("""a 1
""")
        metric_family = Metric("a", "", "untyped")
        metric_family.add_sample("a", {}, (1, None))
        self.assertEqual([metric_family], list(families))
    def test_untyped(self):
        # https://github.com/prometheus/client_python/issues/79
        families = text_string_to_metric_families("""# HELP redis_connected_clients Redis connected clients
# TYPE redis_connected_clients untyped
redis_connected_clients{instance="rough-snowflake-web",port="6380"} 10.0
redis_connected_clients{instance="rough-snowflake-web",port="6381"} 12.0
""")
        m = Metric("redis_connected_clients", "Redis connected clients", "untyped")
        m.samples = [
            ("redis_connected_clients", {"instance": "rough-snowflake-web", "port": "6380"}, (10, None)),
            ("redis_connected_clients", {"instance": "rough-snowflake-web", "port": "6381"}, (12, None)),
        ]
        self.assertEqual([m], list(families))
    def test_type_help_switched(self):
        # HELP before TYPE must parse the same as TYPE before HELP.
        families = text_string_to_metric_families("""# HELP a help
# TYPE a counter
a 1
""")
        self.assertEqual([CounterMetricFamily("a", "help", value=1)], list(families))
    def test_blank_lines_and_comments(self):
        # Unknown comment keywords and blank lines are ignored.
        families = text_string_to_metric_families("""
# TYPE a counter
# FOO a
# BAR b
# HELP a help
a 1
""")
        self.assertEqual([CounterMetricFamily("a", "help", value=1)], list(families))
    def test_tabs(self):
        # Tabs are valid separators in the text format.
        families = text_string_to_metric_families("""#\tTYPE\ta\tcounter
#\tHELP\ta\thelp
a\t1
""")
        self.assertEqual([CounterMetricFamily("a", "help", value=1)], list(families))
    def test_empty_help(self):
        families = text_string_to_metric_families("""# TYPE a counter
# HELP a
a 1
""")
        self.assertEqual([CounterMetricFamily("a", "", value=1)], list(families))
    def test_labels_and_infinite(self):
        families = text_string_to_metric_families("""# TYPE a counter
# HELP a help
a{foo="bar"} +Inf
a{foo="baz"} -Inf
""")
        metric_family = CounterMetricFamily("a", "help", labels=["foo"])
        metric_family.add_metric(["bar"], core._INF)
        metric_family.add_metric(["baz"], core._MINUS_INF)
        self.assertEqual([metric_family], list(families))
    def test_spaces(self):
        # Arbitrary whitespace around label syntax must be tolerated.
        families = text_string_to_metric_families("""# TYPE a counter
# HELP a help
a{ foo = "bar" } 1
a\t\t{\t\tfoo\t\t=\t\t"baz"\t\t}\t\t2
""")
        metric_family = CounterMetricFamily("a", "help", labels=["foo"])
        metric_family.add_metric(["bar"], 1)
        metric_family.add_metric(["baz"], 2)
        self.assertEqual([metric_family], list(families))
    def test_commas(self):
        # Trailing commas inside the label braces are permitted.
        families = text_string_to_metric_families("""# TYPE a counter
# HELP a help
a{foo="bar",} 1
# TYPE b counter
# HELP b help
b{,} 2
""")
        a = CounterMetricFamily("a", "help", labels=["foo"])
        a.add_metric(["bar"], 1)
        b = CounterMetricFamily("b", "help", value=2)
        self.assertEqual([a, b], list(families))
    def test_empty_brackets(self):
        families = text_string_to_metric_families("""# TYPE a counter
# HELP a help
a{} 1
""")
        self.assertEqual([CounterMetricFamily("a", "help", value=1)], list(families))
    def test_nan(self):
        families = text_string_to_metric_families("""a NaN
""")
        # Can't use a simple comparison as nan != nan.
        self.assertTrue(math.isnan(list(families)[0].samples[0][2][0]))
    def test_escaping(self):
        # Escape sequences in HELP and label values; unknown escapes pass through.
        families = text_string_to_metric_families("""# TYPE a counter
# HELP a he\\n\\\\l\\tp
a{foo="b\\"a\\nr"} 1
a{foo="b\\\\a\\z"} 2
""")
        metric_family = CounterMetricFamily("a", "he\n\\l\\tp", labels=["foo"])
        metric_family.add_metric(["b\"a\nr"], 1)
        metric_family.add_metric(["b\\a\\z"], 2)
        self.assertEqual([metric_family], list(families))
    @unittest.skipIf(sys.version_info < (2, 7), "Test requires Python 2.7+.")
    def test_roundtrip(self):
        # Real-world exposition text must parse and re-serialize byte-identically.
        text = """# HELP go_gc_duration_seconds A summary of the GC invocation durations.
# TYPE go_gc_duration_seconds summary
go_gc_duration_seconds{quantile="0"} 0.013300656000000001
go_gc_duration_seconds{quantile="0.25"} 0.013638736
go_gc_duration_seconds{quantile="0.5"} 0.013759906
go_gc_duration_seconds{quantile="0.75"} 0.013962066
go_gc_duration_seconds{quantile="1"} 0.021383540000000003
go_gc_duration_seconds_sum 56.12904785
go_gc_duration_seconds_count 7476.0
# HELP go_goroutines Number of goroutines that currently exist.
# TYPE go_goroutines gauge
go_goroutines 166.0
# HELP prometheus_local_storage_indexing_batch_duration_milliseconds Quantiles for batch indexing duration in milliseconds.
# TYPE prometheus_local_storage_indexing_batch_duration_milliseconds summary
prometheus_local_storage_indexing_batch_duration_milliseconds{quantile="0.5"} NaN
prometheus_local_storage_indexing_batch_duration_milliseconds{quantile="0.9"} NaN
prometheus_local_storage_indexing_batch_duration_milliseconds{quantile="0.99"} NaN
prometheus_local_storage_indexing_batch_duration_milliseconds_sum 871.5665949999999
prometheus_local_storage_indexing_batch_duration_milliseconds_count 229.0
# HELP process_cpu_seconds_total Total user and system CPU time spent in seconds.
# TYPE process_cpu_seconds_total counter
process_cpu_seconds_total 29323.4
# HELP process_virtual_memory_bytes Virtual memory size in bytes.
# TYPE process_virtual_memory_bytes gauge
process_virtual_memory_bytes 2478268416.0
# HELP prometheus_build_info A metric with a constant '1' value labeled by version, revision, and branch from which Prometheus was built.
# TYPE prometheus_build_info gauge
prometheus_build_info{branch="HEAD",revision="ef176e5",version="0.16.0rc1"} 1.0
# HELP prometheus_local_storage_chunk_ops_total The total number of chunk operations by their type.
# TYPE prometheus_local_storage_chunk_ops_total counter
prometheus_local_storage_chunk_ops_total{type="clone"} 28.0
prometheus_local_storage_chunk_ops_total{type="create"} 997844.0
prometheus_local_storage_chunk_ops_total{type="drop"} 1345758.0
prometheus_local_storage_chunk_ops_total{type="load"} 1641.0
prometheus_local_storage_chunk_ops_total{type="persist"} 981408.0
prometheus_local_storage_chunk_ops_total{type="pin"} 32662.0
prometheus_local_storage_chunk_ops_total{type="transcode"} 980180.0
prometheus_local_storage_chunk_ops_total{type="unpin"} 32662.0
"""
        families = list(text_string_to_metric_families(text))
        class TextCollector(object):
            def collect(self):
                return families
        registry = CollectorRegistry()
        registry.register(TextCollector())
        self.assertEqual(text.encode('utf-8'), generate_latest(registry))
if __name__ == '__main__':
unittest.main()
from __future__ import unicode_literals
import unittest
from prometheus_client import CollectorRegistry, PlatformCollector
class TestPlatformCollector(unittest.TestCase):
    """PlatformCollector tests: interpreter/platform details are exposed as
    labels on the python_info sample, using a mocked platform module."""
    def setUp(self):
        self.registry = CollectorRegistry()
        self.platform = _MockPlatform()
    def test_python_info(self):
        PlatformCollector(registry=self.registry, platform=self.platform)
        self.assertLabels("python_info", {
            "version": "python_version",
            "implementation": "python_implementation",
            "major": "pvt_major",
            "minor": "pvt_minor",
            "patchlevel": "pvt_patchlevel"
        })
    def test_system_info_java(self):
        # When the platform reports "Java" (Jython), JVM details are added.
        self.platform._system = "Java"
        PlatformCollector(registry=self.registry, platform=self.platform)
        self.assertLabels("python_info", {
            "version": "python_version",
            "implementation": "python_implementation",
            "major": "pvt_major",
            "minor": "pvt_minor",
            "patchlevel": "pvt_patchlevel",
            "jvm_version": "jv_release",
            "jvm_release": "vm_release",
            "jvm_vendor": "vm_vendor",
            "jvm_name": "vm_name"
        })
    def assertLabels(self, name, labels):
        # Helper: find the sample with the given name and compare its label
        # dict exactly; fail if no such sample exists in the registry.
        for metric in self.registry.collect():
            for n, l, value in metric.samples:
                if n == name:
                    assert l == labels
                    return
        assert False
class _MockPlatform(object):
def __init__(self):
self._system = "system"
def python_version_tuple(self):
return "pvt_major", "pvt_minor", "pvt_patchlevel"
def python_version(self):
return "python_version"
def python_implementation(self):
return "python_implementation"
def system(self):
return self._system
def java_ver(self):
return (
"jv_release",
"jv_vendor",
("vm_name", "vm_release", "vm_vendor"),
("os_name", "os_version", "os_arch")
)
from __future__ import unicode_literals
import os
import unittest
from prometheus_client import CollectorRegistry, ProcessCollector
class TestProcessCollector(unittest.TestCase):
    """ProcessCollector tests, driven by canned /proc fixture files under the
    tests' 'proc' directory rather than the live /proc filesystem."""
    def setUp(self):
        self.registry = CollectorRegistry()
        # Fixture directory shipped next to this test file.
        self.test_proc = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'proc')
    def test_working(self):
        collector = ProcessCollector(proc=self.test_proc, pid=lambda: 26231, registry=self.registry)
        # Pin clock ticks per second so CPU-seconds math is deterministic.
        collector._ticks = 100
        self.assertEqual(17.21, self.registry.get_sample_value('process_cpu_seconds_total'))
        self.assertEqual(56274944.0, self.registry.get_sample_value('process_virtual_memory_bytes'))
        self.assertEqual(8114176, self.registry.get_sample_value('process_resident_memory_bytes'))
        self.assertEqual(1418184099.75, self.registry.get_sample_value('process_start_time_seconds'))
        self.assertEqual(2048.0, self.registry.get_sample_value('process_max_fds'))
        self.assertEqual(5.0, self.registry.get_sample_value('process_open_fds'))
        self.assertEqual(None, self.registry.get_sample_value('process_fake_namespace'))
    def test_namespace(self):
        # All metric names must carry the configured namespace prefix.
        collector = ProcessCollector(proc=self.test_proc, pid=lambda: 26231, registry=self.registry, namespace='n')
        collector._ticks = 100
        self.assertEqual(17.21, self.registry.get_sample_value('n_process_cpu_seconds_total'))
        self.assertEqual(56274944.0, self.registry.get_sample_value('n_process_virtual_memory_bytes'))
        self.assertEqual(8114176, self.registry.get_sample_value('n_process_resident_memory_bytes'))
        self.assertEqual(1418184099.75, self.registry.get_sample_value('n_process_start_time_seconds'))
        self.assertEqual(2048.0, self.registry.get_sample_value('n_process_max_fds'))
        self.assertEqual(5.0, self.registry.get_sample_value('n_process_open_fds'))
        self.assertEqual(None, self.registry.get_sample_value('process_cpu_seconds_total'))
    def test_working_584(self):
        # pid callbacks may return untrimmed strings (e.g. from a pidfile).
        collector = ProcessCollector(proc=self.test_proc, pid=lambda: "584\n", registry=self.registry)
        collector._ticks = 100
        self.assertEqual(0.0, self.registry.get_sample_value('process_cpu_seconds_total'))
        self.assertEqual(10395648.0, self.registry.get_sample_value('process_virtual_memory_bytes'))
        self.assertEqual(634880, self.registry.get_sample_value('process_resident_memory_bytes'))
        self.assertEqual(1418291667.75, self.registry.get_sample_value('process_start_time_seconds'))
        self.assertEqual(None, self.registry.get_sample_value('process_max_fds'))
        self.assertEqual(None, self.registry.get_sample_value('process_open_fds'))
    def test_working_fake_pid(self):
        # A pid with no fixture data yields no samples at all, not errors.
        collector = ProcessCollector(proc=self.test_proc, pid=lambda: 123, registry=self.registry)
        collector._ticks = 100
        self.assertEqual(None, self.registry.get_sample_value('process_cpu_seconds_total'))
        self.assertEqual(None, self.registry.get_sample_value('process_virtual_memory_bytes'))
        self.assertEqual(None, self.registry.get_sample_value('process_resident_memory_bytes'))
        self.assertEqual(None, self.registry.get_sample_value('process_start_time_seconds'))
        self.assertEqual(None, self.registry.get_sample_value('process_max_fds'))
        self.assertEqual(None, self.registry.get_sample_value('process_open_fds'))
        self.assertEqual(None, self.registry.get_sample_value('process_fake_namespace'))
if __name__ == '__main__':
unittest.main()
from __future__ import absolute_import, unicode_literals
import sys
if sys.version_info < (2, 7):
from unittest2 import skipUnless
else:
from unittest import skipUnless
from prometheus_client import Counter
from prometheus_client import CollectorRegistry, generate_latest
try:
from prometheus_client.twisted import MetricsResource
from twisted.trial.unittest import TestCase
from twisted.web.server import Site
from twisted.web.resource import Resource
from twisted.internet import reactor
from twisted.web.client import Agent
from twisted.web.client import readBody
HAVE_TWISTED = True
except ImportError:
from unittest import TestCase
HAVE_TWISTED = False
class MetricsResourceTest(TestCase):
    """Twisted integration test; skipped entirely when twisted is absent
    (TestCase then falls back to unittest.TestCase via the import guard above)."""
    @skipUnless(HAVE_TWISTED, "Don't have twisted installed.")
    def setUp(self):
        self.registry = CollectorRegistry()
    def test_reports_metrics(self):
        """
        ``MetricsResource`` serves the metrics from the provided registry.
        """
        c = Counter('cc', 'A counter', registry=self.registry)
        c.inc()
        root = Resource()
        root.putChild(b'metrics', MetricsResource(registry=self.registry))
        # Listen on an ephemeral port and make sure it is torn down after the test.
        server = reactor.listenTCP(0, Site(root))
        self.addCleanup(server.stopListening)
        agent = Agent(reactor)
        port = server.getHost().port
        url = "http://localhost:{port}/metrics".format(port=port)
        d = agent.request(b"GET", url.encode("ascii"))
        d.addCallback(readBody)
        # The body must match exactly what generate_latest() produces.
        d.addCallback(self.assertEqual, generate_latest(self.registry))
        # Returning the Deferred lets trial wait for the request to complete.
        return d
[tox]
envlist = coverage-clean,py26,py27,py34,py35,py36,pypy,{py27,py35,py36}-nooptionals,coverage-report
[base]
deps =
coverage
pytest
[testenv]
deps =
{[base]deps}
py26: unittest2
; Twisted does not support Python 2.6.
{py27,py34,py35,pypy}: twisted
commands = coverage run --parallel -m pytest {posargs}
; Ensure test suite passes if no optional dependencies are present.
[testenv:py27-nooptionals]
deps = {[base]deps}
commands = coverage run --parallel -m pytest {posargs}
[testenv:py35-nooptionals]
deps = {[base]deps}
commands = coverage run --parallel -m pytest {posargs}
[testenv:coverage-clean]
deps = coverage
skip_install = true
commands = coverage erase
[testenv:coverage-report]
deps = coverage
skip_install = true
commands =
coverage combine
coverage report
\ No newline at end of file
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment