fix(metrics): Fix metrics compatibility with greenlet/gevent #2756

Merged: 10 commits, Feb 27, 2024

26 changes: 18 additions & 8 deletions sentry_sdk/client.py
@@ -5,6 +5,7 @@
import socket

from sentry_sdk._compat import (
+    PY37,
    datetime_utcnow,
    string_types,
    text_type,
@@ -20,6 +21,7 @@
    get_type_name,
    get_default_release,
    handle_in_app,
+    is_gevent,
    logger,
)
from sentry_sdk.serializer import serialize
@@ -256,14 +258,22 @@ def _capture_envelope(envelope):
        self.metrics_aggregator = None  # type: Optional[MetricsAggregator]
        experiments = self.options.get("_experiments", {})
        if experiments.get("enable_metrics", True):
-            from sentry_sdk.metrics import MetricsAggregator
-
-            self.metrics_aggregator = MetricsAggregator(
-                capture_func=_capture_envelope,
-                enable_code_locations=bool(
-                    experiments.get("metric_code_locations", True)
-                ),
-            )
+            # Context vars are not working correctly on Python <=3.6
+            # with gevent.
+            metrics_supported = not is_gevent() or PY37
+            if metrics_supported:
+                from sentry_sdk.metrics import MetricsAggregator
+
+                self.metrics_aggregator = MetricsAggregator(
+                    capture_func=_capture_envelope,
+                    enable_code_locations=bool(
+                        experiments.get("metric_code_locations", True)
+                    ),
+                )
+            else:
+                logger.info(
+                    "Metrics not supported on Python 3.6 and lower with gevent."
+                )

        max_request_body_size = ("always", "never", "small", "medium")
        if self.options["max_request_body_size"] not in max_request_body_size:
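
For reference, the gate introduced above reduces to a simple predicate: metrics stay enabled whenever gevent is not active, and with gevent only on Python 3.7 or newer. The standalone sketch below illustrates it outside the SDK; `gevent_active` is an illustrative stand-in for sentry_sdk.utils.is_gevent(), and sys.version_info replaces the PY37 constant from sentry_sdk._compat.

import sys

def metrics_supported(gevent_active):
    # Mirrors `not is_gevent() or PY37`: metrics are disabled only when gevent
    # is active on Python 3.6 or lower, where context vars misbehave.
    return not gevent_active or sys.version_info >= (3, 7)

# Without gevent the aggregator is always created; with gevent it depends on
# the interpreter version.
assert metrics_supported(gevent_active=False) is True
print(metrics_supported(gevent_active=True))
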
42 changes: 8 additions & 34 deletions sentry_sdk/metrics.py
@@ -11,15 +11,14 @@
from functools import wraps, partial

import sentry_sdk
-from sentry_sdk._compat import PY2, text_type, utc_from_timestamp, iteritems
+from sentry_sdk._compat import text_type, utc_from_timestamp, iteritems
from sentry_sdk.utils import (
    ContextVar,
    now,
    nanosecond_time,
    to_timestamp,
    serialize_frame,
    json_dumps,
-    is_gevent,
)
from sentry_sdk.envelope import Envelope, Item
from sentry_sdk.tracing import (
@@ -54,18 +53,7 @@
    from sentry_sdk._types import MetricValue


-try:
-    from gevent.monkey import get_original  # type: ignore
-    from gevent.threadpool import ThreadPool  # type: ignore
-except ImportError:
-    import importlib
-
-    def get_original(module, name):
-        # type: (str, str) -> Any
-        return getattr(importlib.import_module(module), name)
-
-
-_in_metrics = ContextVar("in_metrics")
+_in_metrics = ContextVar("in_metrics", default=False)
_sanitize_key = partial(re.compile(r"[^a-zA-Z0-9_/.-]+").sub, "_")
_sanitize_value = partial(re.compile(r"[^\w\d_:/@\.{}\[\]$-]+", re.UNICODE).sub, "_")
_set = set  # set is shadowed below
@@ -96,7 +84,7 @@ def get_code_location(stacklevel):
def recursion_protection():
    # type: () -> Generator[bool, None, None]
    """Enters recursion protection and returns the old flag."""
-    old_in_metrics = _in_metrics.get(False)
+    old_in_metrics = _in_metrics.get()
    _in_metrics.set(True)
    try:
        yield old_in_metrics
@@ -423,16 +411,7 @@ def __init__(
        self._running = True
        self._lock = threading.Lock()

-        if is_gevent() and PY2:
-            # get_original on threading.Event in Python 2 incorrectly returns
-            # the gevent-patched class. Luckily, threading.Event is just an alias
-            # for threading._Event in Python 2, and get_original on
-            # threading._Event correctly gets us the stdlib original.
-            event_cls = get_original("threading", "_Event")
-        else:
-            event_cls = get_original("threading", "Event")
-        self._flush_event = event_cls()  # type: threading.Event
-
+        self._flush_event = threading.Event()  # type: threading.Event
        self._force_flush = False

        # The aggregator shifts its flushing by up to an entire rollup window to
@@ -443,7 +422,7 @@
        # jittering.
        self._flush_shift = random.random() * self.ROLLUP_IN_SECONDS

-        self._flusher = None  # type: Optional[Union[threading.Thread, ThreadPool]]
+        self._flusher = None  # type: Optional[threading.Thread]
        self._flusher_pid = None  # type: Optional[int]

    def _ensure_thread(self):
@@ -466,16 +445,11 @@ def _ensure_thread(self):

        self._flusher_pid = pid

-        if not is_gevent():
-            self._flusher = threading.Thread(target=self._flush_loop)
-            self._flusher.daemon = True
-            start_flusher = self._flusher.start
-        else:
-            self._flusher = ThreadPool(1)
-            start_flusher = partial(self._flusher.spawn, func=self._flush_loop)
+        self._flusher = threading.Thread(target=self._flush_loop)
+        self._flusher.daemon = True

        try:
-            start_flusher()
+            self._flusher.start()
        except RuntimeError:
            # Unfortunately at this point the interpreter is in a state that no
            # longer allows us to spawn a thread and we have to bail.
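
With the gevent-specific machinery removed above, the flusher is a plain daemon threading.Thread on every supported runtime, and the recursion guard leans on the ContextVar default instead of passing a fallback to .get(). Below is a sketch of that guard using the stdlib contextvars module in place of the SDK's ContextVar shim from sentry_sdk.utils; the reset in the finally block is an assumption about the part of the function the hunk truncates after the yield.

from contextlib import contextmanager
from contextvars import ContextVar

_in_metrics = ContextVar("in_metrics", default=False)

@contextmanager
def recursion_protection():
    # default=False makes _in_metrics.get() safe without a fallback argument.
    old_in_metrics = _in_metrics.get()
    _in_metrics.set(True)
    try:
        yield old_in_metrics
    finally:
        # Assumed restore step; the diff cuts the original function off earlier.
        _in_metrics.set(old_in_metrics)

# The yielded flag tells nested calls that metric emission is already running.
with recursion_protection() as already_in_metrics:
    print(already_in_metrics)  # False on first entry
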
57 changes: 56 additions & 1 deletion tests/test_metrics.py
@@ -13,6 +13,17 @@
except ImportError:
import mock # python < 3.3

+try:
+    import gevent
+except ImportError:
+    gevent = None
+
+
+minimum_python_37_with_gevent = pytest.mark.skipif(
+    gevent and sys.version_info < (3, 7),
+    reason="Require Python 3.7 or higher with gevent",
+)


def parse_metrics(bytes):
rv = []
@@ -45,6 +56,7 @@ def parse_metrics(bytes):
    return rv


+@minimum_python_37_with_gevent
@pytest.mark.forked
def test_incr(sentry_init, capture_envelopes, maybe_monkeypatched_threading):
sentry_init(
@@ -97,6 +109,7 @@ def test_incr(sentry_init, capture_envelopes, maybe_monkeypatched_threading):
}


+@minimum_python_37_with_gevent
@pytest.mark.forked
def test_timing(sentry_init, capture_envelopes, maybe_monkeypatched_threading):
sentry_init(
@@ -157,6 +170,7 @@ def test_timing(sentry_init, capture_envelopes, maybe_monkeypatched_threading):
)


+@minimum_python_37_with_gevent
@pytest.mark.forked
def test_timing_decorator(
sentry_init, capture_envelopes, maybe_monkeypatched_threading
@@ -252,6 +266,7 @@ def amazing_nano():
assert line.strip() == "assert amazing() == 42"


+@minimum_python_37_with_gevent
@pytest.mark.forked
def test_timing_basic(sentry_init, capture_envelopes, maybe_monkeypatched_threading):
sentry_init(
@@ -306,6 +321,7 @@ def test_timing_basic(sentry_init, capture_envelopes, maybe_monkeypatched_thread
}


+@minimum_python_37_with_gevent
@pytest.mark.forked
def test_distribution(sentry_init, capture_envelopes, maybe_monkeypatched_threading):
sentry_init(
@@ -368,6 +384,7 @@ def test_distribution(sentry_init, capture_envelopes, maybe_monkeypatched_thread
)


+@minimum_python_37_with_gevent
@pytest.mark.forked
def test_set(sentry_init, capture_envelopes, maybe_monkeypatched_threading):
sentry_init(
@@ -421,6 +438,7 @@ def test_set(sentry_init, capture_envelopes, maybe_monkeypatched_threading):
}


+@minimum_python_37_with_gevent
@pytest.mark.forked
def test_gauge(sentry_init, capture_envelopes, maybe_monkeypatched_threading):
sentry_init(
@@ -454,6 +472,7 @@ def test_gauge(sentry_init, capture_envelopes, maybe_monkeypatched_threading):
}


+@minimum_python_37_with_gevent
@pytest.mark.forked
def test_multiple(sentry_init, capture_envelopes):
sentry_init(
@@ -508,6 +527,7 @@ def test_multiple(sentry_init, capture_envelopes):
}


+@minimum_python_37_with_gevent
@pytest.mark.forked
def test_transaction_name(
sentry_init, capture_envelopes, maybe_monkeypatched_threading
@@ -548,6 +568,7 @@ def test_transaction_name(
}


+@minimum_python_37_with_gevent
@pytest.mark.forked
@pytest.mark.parametrize("sample_rate", [1.0, None])
def test_metric_summaries(
@@ -658,6 +679,7 @@ def test_metric_summaries(
}


+@minimum_python_37_with_gevent
@pytest.mark.forked
def test_metrics_summary_disabled(
sentry_init, capture_envelopes, maybe_monkeypatched_threading
@@ -702,6 +724,7 @@ def test_metrics_summary_disabled(
assert "_metrics_summary" not in t["spans"][0]


+@minimum_python_37_with_gevent
@pytest.mark.forked
def test_metrics_summary_filtered(
sentry_init, capture_envelopes, maybe_monkeypatched_threading
@@ -771,6 +794,7 @@ def should_summarize_metric(key, tags):
} in t["d:foo@second"]


+@minimum_python_37_with_gevent
@pytest.mark.forked
def test_tag_normalization(
sentry_init, capture_envelopes, maybe_monkeypatched_threading
@@ -818,6 +842,7 @@ def test_tag_normalization(
# fmt: on


+@minimum_python_37_with_gevent
@pytest.mark.forked
def test_before_emit_metric(
sentry_init, capture_envelopes, maybe_monkeypatched_threading
@@ -861,6 +886,7 @@ def before_emit(key, tags):
}


+@minimum_python_37_with_gevent
@pytest.mark.forked
def test_aggregator_flush(
sentry_init, capture_envelopes, maybe_monkeypatched_threading
@@ -881,6 +907,7 @@ def test_aggregator_flush(
assert Hub.current.client.metrics_aggregator.buckets == {}


+@minimum_python_37_with_gevent
@pytest.mark.forked
def test_tag_serialization(
sentry_init, capture_envelopes, maybe_monkeypatched_threading
@@ -921,6 +948,7 @@ def test_tag_serialization(
}


+@minimum_python_37_with_gevent
@pytest.mark.forked
def test_flush_recursion_protection(
sentry_init, capture_envelopes, monkeypatch, maybe_monkeypatched_threading
@@ -953,11 +981,12 @@ def bad_capture_envelope(*args, **kwargs):
assert m[0][1] == "counter@none"


+@minimum_python_37_with_gevent
@pytest.mark.forked
def test_flush_recursion_protection_background_flush(
sentry_init, capture_envelopes, monkeypatch, maybe_monkeypatched_threading
):
-    monkeypatch.setattr(metrics.MetricsAggregator, "FLUSHER_SLEEP_TIME", 0.1)
+    monkeypatch.setattr(metrics.MetricsAggregator, "FLUSHER_SLEEP_TIME", 0.01)
sentry_init(
release="fun-release",
environment="not-fun-env",
@@ -984,3 +1013,29 @@ def bad_capture_envelope(*args, **kwargs):
m = parse_metrics(envelope.items[0].payload.get_bytes())
assert len(m) == 1
assert m[0][1] == "counter@none"


+@pytest.mark.skipif(
+    not gevent or sys.version_info >= (3, 7),
+    reason="Python 3.6 or lower and gevent required",
+)
+@pytest.mark.forked
+def test_disable_metrics_for_old_python_with_gevent(
+    sentry_init, capture_envelopes, maybe_monkeypatched_threading
+):
+    if maybe_monkeypatched_threading != "greenlet":
+        pytest.skip("Test specifically for gevent/greenlet")
+
+    sentry_init(
+        release="fun-release",
+        environment="not-fun-env",
+        _experiments={"enable_metrics": True},
+    )
+    envelopes = capture_envelopes()
+
+    metrics.incr("counter")
+
+    Hub.current.flush()
+
+    assert Hub.current.client.metrics_aggregator is None
+    assert not envelopes
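
The two skip conditions in this file are exact complements: the marker skips the regular metric tests precisely in the environments where metrics are now disabled (gevent on Python older than 3.7), while the new test runs only there. The standalone check below illustrates this with boolean stand-ins for the optional gevent import and sys.version_info; the helper names are illustrative, not part of the test suite.

def skip_regular_metric_tests(gevent_installed, version_info):
    # Condition behind @minimum_python_37_with_gevent.
    return gevent_installed and version_info < (3, 7)

def skip_disabled_metrics_test(gevent_installed, version_info):
    # Condition on test_disable_metrics_for_old_python_with_gevent.
    return not gevent_installed or version_info >= (3, 7)

# In any environment, exactly one of the two groups runs.
for gevent_installed in (True, False):
    for version_info in ((2, 7), (3, 6), (3, 7), (3, 12)):
        assert skip_regular_metric_tests(gevent_installed, version_info) != \
            skip_disabled_metrics_test(gevent_installed, version_info)
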
6 changes: 0 additions & 6 deletions tox.ini
@@ -247,12 +247,6 @@ deps =
{py3.6,py3.7,py3.8,py3.9,py3.10,py3.11,py3.12}-common: pytest<7.0.0

# === Gevent ===
-    # See http://www.gevent.org/install.html#older-versions-of-python
-    # for justification of the versions pinned below
-    py3.5-gevent: gevent==20.9.0
-    # See https://stackoverflow.com/questions/51496550/runtime-warning-greenlet-greenlet-size-changed
-    # for justification why greenlet is pinned here
-    py3.5-gevent: greenlet==0.4.17

PR author's review comment on the removed lines -250 to -255: "We don't actually run py3.5-gevent so we don't need this."

{py2.7,py3.6,py3.7,py3.8,py3.9,py3.10,py3.11,py3.12}-gevent: gevent>=22.10.0, <22.11.0
# See https://github.com/pytest-dev/pytest/issues/9621
# and https://github.com/pytest-dev/pytest-forked/issues/67