I'm trying to write a combined RED metrics tracker as a context manager, which is supposed to increment the relevant metrics when used.
However, it appears that either a) my metrics are not being incremented (which I cannot imagine to be the case), or b) I'm not fetching them correctly, as REGISTRY.get_sample_value('<metric_name>', [method, path]) returns None.
Given the following code and tests:
>>> metrics_tracker.py

import timeit

from prometheus_client import Counter, Histogram

HTTP_REQUESTS_TOTAL = Counter('http_requests_total', 'Total amount of HTTP Requests made.', labelnames=['method', 'path'])
HTTP_EXCEPTIONS_TOTAL = Counter('http_exceptions_total', 'Total amount of HTTP exceptions.', labelnames=['method', 'path'])
HTTP_REQUESTS_LATENCY = Histogram('http_requests_latency_seconds', 'Duration of HTTP requests processing.', labelnames=['method', 'path'])


class REDMetricsTracker:
    """Prometheus RED metrics tracker class."""

    def __init__(self, method, path):
        self.method, self.path = method, path
        self.start = None

    def __enter__(self):
        # Count the request and start the latency timer.
        HTTP_REQUESTS_TOTAL.labels(self.method, self.path).inc()
        self.start = timeit.default_timer()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Count any exception raised inside the block; it still propagates,
        # since we do not return a truthy value.
        if exc_val:
            HTTP_EXCEPTIONS_TOTAL.labels(self.method, self.path).inc()
        duration = max(timeit.default_timer() - self.start, 0)
        HTTP_REQUESTS_LATENCY.labels(self.method, self.path).observe(duration)
>>> test_metrics_tracker.py

import time

import pytest
from prometheus_client import REGISTRY

from scenario_player.services.common.metrics import REDMetricsTracker


def trigger_metrics(method, path, wait=False, raise_exc=False):
    with REDMetricsTracker(method, path):
        print("printing stuff")
        if wait:
            print("waiting a few seconds..")
            time.sleep(2)
        if raise_exc:
            print("raising an exception..")
            raise ValueError
        print("Not raising an exception")
    print("Returning.")


class TestREDMetricContextManager:

    def test_requests_made_counter(self):
        method, path = 'TEST', 'PATH'
        before = REGISTRY.get_sample_value('http_requests_total', [method, path]) or 0
        trigger_metrics(method, path)
        after = REGISTRY.get_sample_value('http_requests_total', [method, path])
        assert after - before == 1

    def test_requests_exceptions_counter(self):
        method, path = 'TEST', 'PATH'
        before = REGISTRY.get_sample_value('http_exceptions_total', [method, path]) or 0
        with pytest.raises(ValueError):
            trigger_metrics(method, path, raise_exc=True)
        after = REGISTRY.get_sample_value('http_exceptions_total', [method, path])
        assert after - before == 1

    def test_request_latency_count(self):
        method, path = 'TEST', 'PATH'
        before = REGISTRY.get_sample_value('http_requests_latency_seconds_count', [method, path]) or 0
        trigger_metrics(method, path, wait=True)
        after = REGISTRY.get_sample_value('http_requests_latency_seconds_count', [method, path])
        assert after - before == 1

    def test_request_latency_sum(self):
        method, path = 'TEST', 'PATH'
        before = REGISTRY.get_sample_value('http_requests_latency_seconds_sum', [method, path]) or 0
        trigger_metrics(method, path, wait=True)
        after = REGISTRY.get_sample_value('http_requests_latency_seconds_sum', [method, path])
        diff = after - before
        # Check the difference is roughly in the ballpark of what we expect.
        assert (diff >= 2) and (diff <= 3)
With the following result:
(scenario-player) X280 /home/nls/devel/scenario-player$ pytest tests/unit-tests/services/common/test_metrics.py
======================================================================= test session starts =======================================================================
platform linux -- Python 3.7.3, pytest-4.5.0, py-1.8.0, pluggy-0.11.0
rootdir: /home/nls/devel/scenario-player
collected 4 items
tests/unit-tests/services/common/test_metrics.py FFFF [100%]
============================================================================ FAILURES =============================================================================
_____________________________________________________ TestREDMetricContextManager.test_requests_made_counter ______________________________________________________
self = <common.test_metrics.TestREDMetricContextManager object at 0x7fb1d6a9ed30>
def test_requests_made_counter(self):
method, path = 'TEST', 'PATH'
before = REGISTRY.get_sample_value('http_requests_total', [method, path]) or 0
trigger_metrics(method, path)
after = REGISTRY.get_sample_value('http_requests_total', [method, path])
> assert after - before == 1
E TypeError: unsupported operand type(s) for -: 'NoneType' and 'int'
tests/unit-tests/services/common/test_metrics.py:32: TypeError
---------------------------------------------------------------------- Captured stdout call -----------------------------------------------------------------------
printing stuff
Not raising an exception
Returning.
__________________________________________________ TestREDMetricContextManager.test_requests_exceptions_counter ___________________________________________________
self = <common.test_metrics.TestREDMetricContextManager object at 0x7fb1d6a78160>
def test_requests_exceptions_counter(self):
method, path = 'TEST', 'PATH'
before = REGISTRY.get_sample_value('http_exceptions_total', [method, path]) or 0
with pytest.raises(ValueError):
trigger_metrics(method, path, raise_exc=True)
after = REGISTRY.get_sample_value('http_exceptions_total', [method, path])
> assert after - before == 1
E TypeError: unsupported operand type(s) for -: 'NoneType' and 'int'
tests/unit-tests/services/common/test_metrics.py:42: TypeError
---------------------------------------------------------------------- Captured stdout call -----------------------------------------------------------------------
printing stuff
raising an exception..
_____________________________________________________ TestREDMetricContextManager.test_request_latency_count ______________________________________________________
self = <common.test_metrics.TestREDMetricContextManager object at 0x7fb1d6abbbe0>
def test_request_latency_count(self):
method, path = 'TEST', 'PATH'
before = REGISTRY.get_sample_value('http_requests_latency_seconds_count', [method, path]) or 0
trigger_metrics(method, path, wait=True)
after = REGISTRY.get_sample_value('http_requests_latency_seconds_count', [method, path])
> assert after - before == 1
E TypeError: unsupported operand type(s) for -: 'NoneType' and 'int'
tests/unit-tests/services/common/test_metrics.py:53: TypeError
---------------------------------------------------------------------- Captured stdout call -----------------------------------------------------------------------
printing stuff
waiting a few seconds..
Not raising an exception
Returning.
______________________________________________________ TestREDMetricContextManager.test_request_latency_sum _______________________________________________________
self = <common.test_metrics.TestREDMetricContextManager object at 0x7fb1d6a647b8>
def test_request_latency_sum(self):
method, path = 'TEST', 'PATH'
before = REGISTRY.get_sample_value('http_requests_latency_seconds_sum', [method, path]) or 0
trigger_metrics(method, path, wait=True)
after = REGISTRY.get_sample_value('http_requests_latency_seconds_sum', [method, path])
> diff = after - before
E TypeError: unsupported operand type(s) for -: 'NoneType' and 'int'
tests/unit-tests/services/common/test_metrics.py:64: TypeError
---------------------------------------------------------------------- Captured stdout call -----------------------------------------------------------------------
printing stuff
waiting a few seconds..
Not raising an exception
Returning.
==================================================================== 4 failed in 4.12 seconds =====================================================================
I figured the first time I call get_sample_value it is expected to return None, since the metric hasn't been incremented before and the time series may not exist yet. Now, however, I'm not so sure.
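Not a fix by itself, but a quick way to rule out the "metrics are never incremented" theory is to dump what the default registry actually holds. A minimal sketch (it only assumes REGISTRY.collect() and the samples attribute from prometheus_client):

from prometheus_client import REGISTRY

# Print every sample currently held by the default registry, so we can
# see whether the counters were incremented at all and under which
# label values they were stored.
for metric in REGISTRY.collect():
    for sample in metric.samples:
        # Each sample carries its labels as a dict, e.g.
        # {'method': 'TEST', 'path': 'PATH'}.
        print(sample)

Running this after trigger_metrics() should show the counters were incremented, with their labels stored as dicts; that already hints at what's wrong with the lookup below.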
A classic: REGISTRY.get_sample_value() expects the labels to be passed as a dict, not a list. The lookup compares the labels you pass against each sample's label dict for equality, and a list never compares equal to a dict, so the lookup silently returns None. So calling it like so:

...
REGISTRY.get_sample_value('http_requests_total', {'method': method, 'path': path})
...

...fixes the issue and the tests pass.
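For completeness, here is the first test with the dict-based lookup applied, a sketch of the same fix with nothing else changed:

def test_requests_made_counter(self):
    method, path = 'TEST', 'PATH'
    labels = {'method': method, 'path': path}
    # Pass the label values as a dict keyed by the label names the
    # Counter was declared with, not as a positional list.
    before = REGISTRY.get_sample_value('http_requests_total', labels) or 0
    trigger_metrics(method, path)
    after = REGISTRY.get_sample_value('http_requests_total', labels)
    assert after - before == 1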