From add20670529e0fdbcfb654a3660e1f2b4b049a9a Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Mon, 10 Jun 2024 20:51:07 +0200 Subject: [PATCH 001/244] Skeletons for new components --- .../opentelemetry/contextvars_context.py | 14 ++++++ .../opentelemetry/potel_span_processor.py | 44 +++++++++++++++++++ 2 files changed, 58 insertions(+) create mode 100644 sentry_sdk/integrations/opentelemetry/contextvars_context.py create mode 100644 sentry_sdk/integrations/opentelemetry/potel_span_processor.py diff --git a/sentry_sdk/integrations/opentelemetry/contextvars_context.py b/sentry_sdk/integrations/opentelemetry/contextvars_context.py new file mode 100644 index 0000000000..7a382064c9 --- /dev/null +++ b/sentry_sdk/integrations/opentelemetry/contextvars_context.py @@ -0,0 +1,14 @@ +from opentelemetry.context.context import Context # type: ignore +from opentelemetry.context.contextvars_context import ContextVarsRuntimeContext # type: ignore + + +class SentryContextVarsRuntimeContext(ContextVarsRuntimeContext): # type: ignore + def attach(self, context): + # type: (Context) -> object + # TODO-neel-potel do scope management + return super().attach(context) + + def detach(self, token): + # type: (object) -> None + # TODO-neel-potel not sure if we need anything here, see later + super().detach(token) diff --git a/sentry_sdk/integrations/opentelemetry/potel_span_processor.py b/sentry_sdk/integrations/opentelemetry/potel_span_processor.py new file mode 100644 index 0000000000..795068033e --- /dev/null +++ b/sentry_sdk/integrations/opentelemetry/potel_span_processor.py @@ -0,0 +1,44 @@ +from opentelemetry.sdk.trace import SpanProcessor # type: ignore +from opentelemetry.context import Context # type: ignore +from opentelemetry.trace import Span # type: ignore + +from sentry_sdk._types import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Optional + + +class PotelSentrySpanProcessor(SpanProcessor): # type: ignore + """ + Converts OTel spans into Sentry spans so they can be sent to the Sentry backend. + """ + + def __new__(cls): + # type: () -> PotelSentrySpanProcessor + if not hasattr(cls, "instance"): + cls.instance = super().__new__(cls) + + return cls.instance + + def __init__(self): + # type: () -> None + pass + + def on_start(self, span, parent_context=None): + # type: (Span, Optional[Context]) -> None + pass + + def on_end(self, span): + # type: (Span) -> None + pass + + # TODO-neel-potel not sure we need a clear like JS + def shutdown(self): + # type: () -> None + pass + + # TODO-neel-potel change default? 
this is 30 sec + # TODO-neel-potel call this in client.flush + def force_flush(self, timeout_millis=30000): + # type: (int) -> bool + return True From 5bad7f16e0770d734ab6460cc82b923acfc7c244 Mon Sep 17 00:00:00 2001 From: Ivana Kellyerova Date: Wed, 26 Jun 2024 11:41:18 +0200 Subject: [PATCH 002/244] mypy fixes --- .../opentelemetry/contextvars_context.py | 6 +++--- .../opentelemetry/potel_span_processor.py | 12 ++++++------ 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/sentry_sdk/integrations/opentelemetry/contextvars_context.py b/sentry_sdk/integrations/opentelemetry/contextvars_context.py index 7a382064c9..e74d67dc97 100644 --- a/sentry_sdk/integrations/opentelemetry/contextvars_context.py +++ b/sentry_sdk/integrations/opentelemetry/contextvars_context.py @@ -1,8 +1,8 @@ -from opentelemetry.context.context import Context # type: ignore -from opentelemetry.context.contextvars_context import ContextVarsRuntimeContext # type: ignore +from opentelemetry.context.context import Context +from opentelemetry.context.contextvars_context import ContextVarsRuntimeContext -class SentryContextVarsRuntimeContext(ContextVarsRuntimeContext): # type: ignore +class SentryContextVarsRuntimeContext(ContextVarsRuntimeContext): def attach(self, context): # type: (Context) -> object # TODO-neel-potel do scope management diff --git a/sentry_sdk/integrations/opentelemetry/potel_span_processor.py b/sentry_sdk/integrations/opentelemetry/potel_span_processor.py index 795068033e..94f01b3283 100644 --- a/sentry_sdk/integrations/opentelemetry/potel_span_processor.py +++ b/sentry_sdk/integrations/opentelemetry/potel_span_processor.py @@ -1,14 +1,14 @@ -from opentelemetry.sdk.trace import SpanProcessor # type: ignore -from opentelemetry.context import Context # type: ignore -from opentelemetry.trace import Span # type: ignore +from opentelemetry.sdk.trace import SpanProcessor +from opentelemetry.context import Context from sentry_sdk._types import TYPE_CHECKING if TYPE_CHECKING: from typing import Optional + from opentelemetry.sdk.trace import ReadableSpan -class PotelSentrySpanProcessor(SpanProcessor): # type: ignore +class PotelSentrySpanProcessor(SpanProcessor): """ Converts OTel spans into Sentry spans so they can be sent to the Sentry backend. 
""" @@ -25,11 +25,11 @@ def __init__(self): pass def on_start(self, span, parent_context=None): - # type: (Span, Optional[Context]) -> None + # type: (ReadableSpan, Optional[Context]) -> None pass def on_end(self, span): - # type: (Span) -> None + # type: (ReadableSpan) -> None pass # TODO-neel-potel not sure we need a clear like JS From b6a62319d43e8871d8f74883af31ec2b1cf7e4a0 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Tue, 9 Jul 2024 14:35:41 +0200 Subject: [PATCH 003/244] Add simple scope management whenever a context is attached (#3159) Add simple scope management whenever a context is attached * create a new otel context `_SCOPES_KEY` that will hold a tuple of `(curent_scope, isolation_scope)` * the `current_scope` will always be forked (like on every span creation/context update in practice) * note that this is on `attach`, so not on all copy-on-write context object creation but only on apis such as [`trace.use_span`](https://github.com/open-telemetry/opentelemetry-python/blob/ba22b165471bde2037620f2c850ab648a849fbc0/opentelemetry-api/src/opentelemetry/trace/__init__.py#L547) or [`tracer.start_as_current_span`](https://github.com/open-telemetry/opentelemetry-python/blob/ba22b165471bde2037620f2c850ab648a849fbc0/opentelemetry-api/src/opentelemetry/trace/__init__.py#L329) * basically every otel `context` fork corresponds to our `current_scope` fork * the `isolation_scope` currently will not be forked * these will later be updated, for instance when we update our top level scope apis that fork isolation scope, that will also have a corresponding change in this `attach` function --- .../opentelemetry/contextvars_context.py | 26 ++++++++++++++----- 1 file changed, 19 insertions(+), 7 deletions(-) diff --git a/sentry_sdk/integrations/opentelemetry/contextvars_context.py b/sentry_sdk/integrations/opentelemetry/contextvars_context.py index e74d67dc97..5e5eb9ba30 100644 --- a/sentry_sdk/integrations/opentelemetry/contextvars_context.py +++ b/sentry_sdk/integrations/opentelemetry/contextvars_context.py @@ -1,14 +1,26 @@ -from opentelemetry.context.context import Context +from opentelemetry.context import Context, create_key, get_value, set_value from opentelemetry.context.contextvars_context import ContextVarsRuntimeContext +from sentry_sdk.scope import Scope + + +_SCOPES_KEY = create_key("sentry_scopes") + class SentryContextVarsRuntimeContext(ContextVarsRuntimeContext): def attach(self, context): # type: (Context) -> object - # TODO-neel-potel do scope management - return super().attach(context) + scopes = get_value(_SCOPES_KEY, context) + + if scopes and isinstance(scopes, tuple): + (current_scope, isolation_scope) = scopes + else: + current_scope = Scope.get_current_scope() + isolation_scope = Scope.get_isolation_scope() + + # TODO-neel-potel fork isolation_scope too like JS + # once we setup our own apis to pass through to otel + new_scopes = (current_scope.fork(), isolation_scope) + new_context = set_value(_SCOPES_KEY, new_scopes, context) - def detach(self, token): - # type: (object) -> None - # TODO-neel-potel not sure if we need anything here, see later - super().detach(token) + return super().attach(new_context) From 4428ee90c9c856a56701d253db32dc2cc6820822 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Tue, 9 Jul 2024 14:37:37 +0200 Subject: [PATCH 004/244] Implement new POTel span processor (#3223) * only acts on `on_end` instead of both `on_start/on_end` as before * store children spans in a dict mapping `span_id -> children` * new dict only stores otel span objects and no 
sentry transaction/span objects so we save a bit of useless memory allocation * I'm not using our current `Transaction/Span` classes at all to build the event because when we add our APIs later, we'll need to rip these out and we also avoid having to deal with the `instrumenter` problem * if we get a root span (without parent), we recursively walk the dict and find the children and package up the transaction event and send it * I didn't do it like JS because I think this way is better * they [group an array of `finished_spans`](https://github.com/getsentry/sentry-javascript/blob/7e298036a21a5658f3eb9ba184165178c48d7ef8/packages/opentelemetry/src/spanExporter.ts#L132) every time a root span ends and I think this uses more cpu than what I did * and the dict like I used it doesn't take more space than the array either * if we get a span with a parent we just update the dict to find the span later * moved the common `is_sentry_span` logic to utils --- .../integrations/opentelemetry/consts.py | 2 + .../opentelemetry/potel_span_processor.py | 149 +++++++++++++++++- .../opentelemetry/span_processor.py | 118 ++------------ .../integrations/opentelemetry/utils.py | 105 ++++++++++++ .../opentelemetry/test_span_processor.py | 10 +- 5 files changed, 270 insertions(+), 114 deletions(-) create mode 100644 sentry_sdk/integrations/opentelemetry/utils.py diff --git a/sentry_sdk/integrations/opentelemetry/consts.py b/sentry_sdk/integrations/opentelemetry/consts.py index ec493449d3..69a770ad53 100644 --- a/sentry_sdk/integrations/opentelemetry/consts.py +++ b/sentry_sdk/integrations/opentelemetry/consts.py @@ -3,3 +3,5 @@ SENTRY_TRACE_KEY = create_key("sentry-trace") SENTRY_BAGGAGE_KEY = create_key("sentry-baggage") +OTEL_SENTRY_CONTEXT = "otel" +SPAN_ORIGIN = "auto.otel" diff --git a/sentry_sdk/integrations/opentelemetry/potel_span_processor.py b/sentry_sdk/integrations/opentelemetry/potel_span_processor.py index 94f01b3283..faa583a18d 100644 --- a/sentry_sdk/integrations/opentelemetry/potel_span_processor.py +++ b/sentry_sdk/integrations/opentelemetry/potel_span_processor.py @@ -1,11 +1,24 @@ -from opentelemetry.sdk.trace import SpanProcessor +from collections import deque, defaultdict + +from opentelemetry.trace import format_trace_id, format_span_id from opentelemetry.context import Context +from opentelemetry.sdk.trace import Span, ReadableSpan, SpanProcessor +from sentry_sdk import capture_event +from sentry_sdk.integrations.opentelemetry.utils import ( + is_sentry_span, + convert_otel_timestamp, + extract_span_data, +) +from sentry_sdk.integrations.opentelemetry.consts import ( + OTEL_SENTRY_CONTEXT, + SPAN_ORIGIN, +) from sentry_sdk._types import TYPE_CHECKING if TYPE_CHECKING: - from typing import Optional - from opentelemetry.sdk.trace import ReadableSpan + from typing import Optional, List, Any, Deque, DefaultDict + from sentry_sdk._types import Event class PotelSentrySpanProcessor(SpanProcessor): @@ -22,15 +35,25 @@ def __new__(cls): def __init__(self): # type: () -> None - pass + self._children_spans = defaultdict( + list + ) # type: DefaultDict[int, List[ReadableSpan]] def on_start(self, span, parent_context=None): - # type: (ReadableSpan, Optional[Context]) -> None + # type: (Span, Optional[Context]) -> None pass def on_end(self, span): # type: (ReadableSpan) -> None - pass + if is_sentry_span(span): + return + + # TODO-neel-potel-remote only take parent if not remote + if span.parent: + self._children_spans[span.parent.span_id].append(span) + else: + # if have a root span ending, we build a 
transaction and send it + self._flush_root_span(span) # TODO-neel-potel not sure we need a clear like JS def shutdown(self): @@ -42,3 +65,117 @@ def shutdown(self): def force_flush(self, timeout_millis=30000): # type: (int) -> bool return True + + def _flush_root_span(self, span): + # type: (ReadableSpan) -> None + transaction_event = self._root_span_to_transaction_event(span) + if not transaction_event: + return + + spans = [] + for child in self._collect_children(span): + span_json = self._span_to_json(child) + if span_json: + spans.append(span_json) + transaction_event["spans"] = spans + # TODO-neel-potel sort and cutoff max spans + + capture_event(transaction_event) + + def _collect_children(self, span): + # type: (ReadableSpan) -> List[ReadableSpan] + if not span.context: + return [] + + children = [] + bfs_queue = deque() # type: Deque[int] + bfs_queue.append(span.context.span_id) + + while bfs_queue: + parent_span_id = bfs_queue.popleft() + node_children = self._children_spans.pop(parent_span_id, []) + children.extend(node_children) + bfs_queue.extend( + [child.context.span_id for child in node_children if child.context] + ) + + return children + + # we construct the event from scratch here + # and not use the current Transaction class for easier refactoring + def _root_span_to_transaction_event(self, span): + # type: (ReadableSpan) -> Optional[Event] + if not span.context: + return None + if not span.start_time: + return None + if not span.end_time: + return None + + trace_id = format_trace_id(span.context.trace_id) + span_id = format_span_id(span.context.span_id) + parent_span_id = format_span_id(span.parent.span_id) if span.parent else None + + (op, description, _) = extract_span_data(span) + + trace_context = { + "trace_id": trace_id, + "span_id": span_id, + "origin": SPAN_ORIGIN, + "op": op, + "status": "ok", # TODO-neel-potel span status mapping + } # type: dict[str, Any] + + if parent_span_id: + trace_context["parent_span_id"] = parent_span_id + if span.attributes: + trace_context["data"] = dict(span.attributes) + + contexts = {"trace": trace_context} + if span.resource.attributes: + contexts[OTEL_SENTRY_CONTEXT] = {"resource": dict(span.resource.attributes)} + + event = { + "type": "transaction", + "transaction": description, + # TODO-neel-potel tx source based on integration + "transaction_info": {"source": "custom"}, + "contexts": contexts, + "start_timestamp": convert_otel_timestamp(span.start_time), + "timestamp": convert_otel_timestamp(span.end_time), + } # type: Event + + return event + + def _span_to_json(self, span): + # type: (ReadableSpan) -> Optional[dict[str, Any]] + if not span.context: + return None + if not span.start_time: + return None + if not span.end_time: + return None + + trace_id = format_trace_id(span.context.trace_id) + span_id = format_span_id(span.context.span_id) + parent_span_id = format_span_id(span.parent.span_id) if span.parent else None + + (op, description, _) = extract_span_data(span) + + span_json = { + "trace_id": trace_id, + "span_id": span_id, + "origin": SPAN_ORIGIN, + "op": op, + "description": description, + "status": "ok", # TODO-neel-potel span status mapping + "start_timestamp": convert_otel_timestamp(span.start_time), + "timestamp": convert_otel_timestamp(span.end_time), + } # type: dict[str, Any] + + if parent_span_id: + span_json["parent_span_id"] = parent_span_id + if span.attributes: + span_json["data"] = dict(span.attributes) + + return span_json diff --git a/sentry_sdk/integrations/opentelemetry/span_processor.py 
b/sentry_sdk/integrations/opentelemetry/span_processor.py index d54372b374..b5dec4f6be 100644 --- a/sentry_sdk/integrations/opentelemetry/span_processor.py +++ b/sentry_sdk/integrations/opentelemetry/span_processor.py @@ -4,12 +4,10 @@ from opentelemetry.context import get_value from opentelemetry.sdk.trace import SpanProcessor, ReadableSpan as OTelSpan -from opentelemetry.semconv.trace import SpanAttributes from opentelemetry.trace import ( format_span_id, format_trace_id, get_current_span, - SpanKind, ) from opentelemetry.trace.span import ( INVALID_SPAN_ID, @@ -20,22 +18,24 @@ from sentry_sdk.integrations.opentelemetry.consts import ( SENTRY_BAGGAGE_KEY, SENTRY_TRACE_KEY, + OTEL_SENTRY_CONTEXT, + SPAN_ORIGIN, +) +from sentry_sdk.integrations.opentelemetry.utils import ( + is_sentry_span, + extract_span_data, ) from sentry_sdk.scope import add_global_event_processor from sentry_sdk.tracing import Transaction, Span as SentrySpan -from sentry_sdk.utils import Dsn from sentry_sdk._types import TYPE_CHECKING -from urllib3.util import parse_url as urlparse if TYPE_CHECKING: from typing import Any, Optional, Union from opentelemetry import context as context_api from sentry_sdk._types import Event, Hint -OPEN_TELEMETRY_CONTEXT = "otel" SPAN_MAX_TIME_OPEN_MINUTES = 10 -SPAN_ORIGIN = "auto.otel" def link_trace_context_to_error_event(event, otel_span_map): @@ -117,18 +117,13 @@ def on_start(self, otel_span, parent_context=None): if not client.dsn: return - try: - _ = Dsn(client.dsn) - except Exception: - return - if client.options["instrumenter"] != INSTRUMENTER.OTEL: return if not otel_span.get_span_context().is_valid: return - if self._is_sentry_span(otel_span): + if is_sentry_span(otel_span): return trace_data = self._get_trace_data(otel_span, parent_context) @@ -200,7 +195,7 @@ def on_end(self, otel_span): if isinstance(sentry_span, Transaction): sentry_span.name = otel_span.name sentry_span.set_context( - OPEN_TELEMETRY_CONTEXT, self._get_otel_context(otel_span) + OTEL_SENTRY_CONTEXT, self._get_otel_context(otel_span) ) self._update_transaction_with_otel_data(sentry_span, otel_span) @@ -223,27 +218,6 @@ def on_end(self, otel_span): self._prune_old_spans() - def _is_sentry_span(self, otel_span): - # type: (OTelSpan) -> bool - """ - Break infinite loop: - HTTP requests to Sentry are caught by OTel and send again to Sentry. 
- """ - otel_span_url = None - if otel_span.attributes is not None: - otel_span_url = otel_span.attributes.get(SpanAttributes.HTTP_URL) - otel_span_url = cast("Optional[str]", otel_span_url) - - dsn_url = None - client = get_client() - if client.dsn: - dsn_url = Dsn(client.dsn).netloc - - if otel_span_url and dsn_url and dsn_url in otel_span_url: - return True - - return False - def _get_otel_context(self, otel_span): # type: (OTelSpan) -> dict[str, Any] """ @@ -312,81 +286,19 @@ def _update_span_with_otel_data(self, sentry_span, otel_span): """ sentry_span.set_data("otel.kind", otel_span.kind) - op = otel_span.name - description = otel_span.name - if otel_span.attributes is not None: for key, val in otel_span.attributes.items(): sentry_span.set_data(key, val) - http_method = otel_span.attributes.get(SpanAttributes.HTTP_METHOD) - http_method = cast("Optional[str]", http_method) - - db_query = otel_span.attributes.get(SpanAttributes.DB_SYSTEM) - - if http_method: - op = "http" - - if otel_span.kind == SpanKind.SERVER: - op += ".server" - elif otel_span.kind == SpanKind.CLIENT: - op += ".client" - - description = http_method - - peer_name = otel_span.attributes.get(SpanAttributes.NET_PEER_NAME, None) - if peer_name: - description += " {}".format(peer_name) - - target = otel_span.attributes.get(SpanAttributes.HTTP_TARGET, None) - if target: - description += " {}".format(target) - - if not peer_name and not target: - url = otel_span.attributes.get(SpanAttributes.HTTP_URL, None) - url = cast("Optional[str]", url) - if url: - parsed_url = urlparse(url) - url = "{}://{}{}".format( - parsed_url.scheme, parsed_url.netloc, parsed_url.path - ) - description += " {}".format(url) - - status_code = otel_span.attributes.get( - SpanAttributes.HTTP_STATUS_CODE, None - ) - status_code = cast("Optional[int]", status_code) - if status_code: - sentry_span.set_http_status(status_code) - - elif db_query: - op = "db" - statement = otel_span.attributes.get(SpanAttributes.DB_STATEMENT, None) - statement = cast("Optional[str]", statement) - if statement: - description = statement - + (op, description, status_code) = extract_span_data(otel_span) sentry_span.op = op sentry_span.description = description + if status_code: + sentry_span.set_http_status(status_code) def _update_transaction_with_otel_data(self, sentry_span, otel_span): # type: (SentrySpan, OTelSpan) -> None - if otel_span.attributes is None: - return - - http_method = otel_span.attributes.get(SpanAttributes.HTTP_METHOD) - - if http_method: - status_code = otel_span.attributes.get(SpanAttributes.HTTP_STATUS_CODE) - status_code = cast("Optional[int]", status_code) - if status_code: - sentry_span.set_http_status(status_code) - - op = "http" - - if otel_span.kind == SpanKind.SERVER: - op += ".server" - elif otel_span.kind == SpanKind.CLIENT: - op += ".client" - - sentry_span.op = op + (op, _, status_code) = extract_span_data(otel_span) + sentry_span.op = op + if status_code: + sentry_span.set_http_status(status_code) diff --git a/sentry_sdk/integrations/opentelemetry/utils.py b/sentry_sdk/integrations/opentelemetry/utils.py new file mode 100644 index 0000000000..1dc77ab150 --- /dev/null +++ b/sentry_sdk/integrations/opentelemetry/utils.py @@ -0,0 +1,105 @@ +from typing import cast +from datetime import datetime, timezone + +from opentelemetry.trace import SpanKind +from opentelemetry.semconv.trace import SpanAttributes +from opentelemetry.sdk.trace import ReadableSpan +from urllib3.util import parse_url as urlparse + +from sentry_sdk import get_client +from 
sentry_sdk.utils import Dsn + +from sentry_sdk._types import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Optional, Tuple + + +def is_sentry_span(span): + # type: (ReadableSpan) -> bool + """ + Break infinite loop: + HTTP requests to Sentry are caught by OTel and send again to Sentry. + """ + if not span.attributes: + return False + + span_url = span.attributes.get(SpanAttributes.HTTP_URL, None) + span_url = cast("Optional[str]", span_url) + + if not span_url: + return False + + dsn_url = None + client = get_client() + + if client.dsn: + try: + dsn_url = Dsn(client.dsn).netloc + except Exception: + pass + + if not dsn_url: + return False + + if dsn_url in span_url: + return True + + return False + + +def convert_otel_timestamp(time): + # type: (int) -> datetime + return datetime.fromtimestamp(time / 1e9, timezone.utc) + + +def extract_span_data(span): + # type: (ReadableSpan) -> Tuple[str, str, Optional[int]] + op = span.name + description = span.name + status_code = None + + if span.attributes is None: + return (op, description, status_code) + + http_method = span.attributes.get(SpanAttributes.HTTP_METHOD) + http_method = cast("Optional[str]", http_method) + db_query = span.attributes.get(SpanAttributes.DB_SYSTEM) + + if http_method: + op = "http" + if span.kind == SpanKind.SERVER: + op += ".server" + elif span.kind == SpanKind.CLIENT: + op += ".client" + + description = http_method + + peer_name = span.attributes.get(SpanAttributes.NET_PEER_NAME, None) + if peer_name: + description += " {}".format(peer_name) + + target = span.attributes.get(SpanAttributes.HTTP_TARGET, None) + if target: + description += " {}".format(target) + + if not peer_name and not target: + url = span.attributes.get(SpanAttributes.HTTP_URL, None) + url = cast("Optional[str]", url) + if url: + parsed_url = urlparse(url) + url = "{}://{}{}".format( + parsed_url.scheme, parsed_url.netloc, parsed_url.path + ) + description += " {}".format(url) + + status_code = span.attributes.get(SpanAttributes.HTTP_STATUS_CODE) + elif db_query: + op = "db" + statement = span.attributes.get(SpanAttributes.DB_STATEMENT, None) + statement = cast("Optional[str]", statement) + if statement: + description = statement + + status_code = cast("Optional[int]", status_code) + return (op, description, status_code) diff --git a/tests/integrations/opentelemetry/test_span_processor.py b/tests/integrations/opentelemetry/test_span_processor.py index 8064e127f6..cc52735214 100644 --- a/tests/integrations/opentelemetry/test_span_processor.py +++ b/tests/integrations/opentelemetry/test_span_processor.py @@ -10,6 +10,7 @@ SentrySpanProcessor, link_trace_context_to_error_event, ) +from sentry_sdk.integrations.opentelemetry.utils import is_sentry_span from sentry_sdk.scope import Scope from sentry_sdk.tracing import Span, Transaction from sentry_sdk.tracing_utils import extract_sentrytrace_data @@ -18,25 +19,24 @@ def test_is_sentry_span(): otel_span = MagicMock() - span_processor = SentrySpanProcessor() - assert not span_processor._is_sentry_span(otel_span) + assert not is_sentry_span(otel_span) client = MagicMock() client.options = {"instrumenter": "otel"} client.dsn = "https://1234567890abcdef@o123456.ingest.sentry.io/123456" Scope.get_global_scope().set_client(client) - assert not span_processor._is_sentry_span(otel_span) + assert not is_sentry_span(otel_span) otel_span.attributes = { "http.url": "https://example.com", } - assert not span_processor._is_sentry_span(otel_span) + assert not is_sentry_span(otel_span) otel_span.attributes = { 
"http.url": "https://o123456.ingest.sentry.io/api/123/envelope", } - assert span_processor._is_sentry_span(otel_span) + assert is_sentry_span(otel_span) def test_get_otel_context(): From acd7cf23bea251607defbfe8216fa3f983ab80ae Mon Sep 17 00:00:00 2001 From: Daniel Szoke Date: Mon, 22 Jul 2024 16:26:09 +0200 Subject: [PATCH 005/244] feat(tracing): Remove `instrumenter` option Remove `instrumenter` parameter from all functions that accept it (details below), and modify tests to not pass the `instrumenter` parameter to any functions that used to take it. Also, delete `tests/tracing/test_noop_span.py`, which tests functionality removed in this commit. BREAKING CHANGE: - Remove `sentry_sdk.init`'s `instrumenter` kwarg. - Delete `sentry_sdk.contsts.INSTRUMENTER` class. - Remove `sentry_sdk.hub.Hub.start_span`'s `instrumenter` parameter. - Remove `sentry_sdk.hub.Hub.start_transaction`'s `instrumenter` parameter. - Remove `sentry_sdk.scope.Scope.start_transaction`'s `instrumenter` parameter. - Remove `sentry_sdk.scope.Scope.start_span`'s `instrumenter` parameter. - Remove `sentry_sdk.tracing.Span.start_child`'s `instrumenter` parameter. - Remove `sentry_sdk.tracing.NoOpSpan.start_child`'s `instrumenter` parameter. Closes: #3321 --- sentry_sdk/api.py | 6 +-- sentry_sdk/client.py | 5 -- sentry_sdk/consts.py | 6 --- sentry_sdk/hub.py | 23 ++++---- .../opentelemetry/span_processor.py | 17 +----- sentry_sdk/scope.py | 28 ++-------- sentry_sdk/tracing.py | 17 ++---- .../opentelemetry/test_span_processor.py | 11 +--- tests/tracing/test_noop_span.py | 52 ------------------- 9 files changed, 23 insertions(+), 142 deletions(-) delete mode 100644 tests/tracing/test_noop_span.py diff --git a/sentry_sdk/api.py b/sentry_sdk/api.py index 41c4814146..7766ab79a4 100644 --- a/sentry_sdk/api.py +++ b/sentry_sdk/api.py @@ -3,7 +3,6 @@ from sentry_sdk import tracing_utils, Client from sentry_sdk._types import TYPE_CHECKING -from sentry_sdk.consts import INSTRUMENTER from sentry_sdk.scope import Scope, _ScopeManager, new_scope, isolation_scope from sentry_sdk.tracing import NoOpSpan, Transaction @@ -293,7 +292,6 @@ def start_span( @scopemethod def start_transaction( transaction=None, # type: Optional[Transaction] - instrumenter=INSTRUMENTER.SENTRY, # type: str custom_sampling_context=None, # type: Optional[SamplingContext] **kwargs, # type: Unpack[TransactionKwargs] ): @@ -322,15 +320,13 @@ def start_transaction( :param transaction: The transaction to start. If omitted, we create and start a new transaction. - :param instrumenter: This parameter is meant for internal use only. It - will be removed in the next major version. :param custom_sampling_context: The transaction's custom sampling context. :param kwargs: Optional keyword arguments to be passed to the Transaction constructor. See :py:class:`sentry_sdk.tracing.Transaction` for available arguments. 
""" return Scope.get_current_scope().start_transaction( - transaction, instrumenter, custom_sampling_context, **kwargs + transaction, custom_sampling_context, **kwargs ) diff --git a/sentry_sdk/client.py b/sentry_sdk/client.py index f93aa935c2..cfb2352305 100644 --- a/sentry_sdk/client.py +++ b/sentry_sdk/client.py @@ -25,7 +25,6 @@ from sentry_sdk.consts import ( DEFAULT_MAX_VALUE_LENGTH, DEFAULT_OPTIONS, - INSTRUMENTER, VERSION, ClientConstructor, ) @@ -113,9 +112,6 @@ def _get_options(*args, **kwargs): if rv["server_name"] is None and hasattr(socket, "gethostname"): rv["server_name"] = socket.gethostname() - if rv["instrumenter"] is None: - rv["instrumenter"] = INSTRUMENTER.SENTRY - if rv["project_root"] is None: try: project_root = os.getcwd() @@ -357,7 +353,6 @@ def _capture_envelope(envelope): logger.debug( "[OTel] Enabling experimental OTel-powered performance monitoring." ) - self.options["instrumenter"] = INSTRUMENTER.OTEL if ( "sentry_sdk.integrations.opentelemetry.integration.OpenTelemetryIntegration" not in _DEFAULT_INTEGRATIONS diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index b4d30cd24a..03d8ce730d 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -85,11 +85,6 @@ class EndpointType(Enum): ] -class INSTRUMENTER: - SENTRY = "sentry" - OTEL = "otel" - - class SPANDATA: """ Additional information describing the type of the span. @@ -518,7 +513,6 @@ def __init__( send_client_reports=True, # type: bool _experiments={}, # type: Experiments # noqa: B006 proxy_headers=None, # type: Optional[Dict[str, str]] - instrumenter=INSTRUMENTER.SENTRY, # type: Optional[str] before_send_transaction=None, # type: Optional[TransactionProcessor] project_root=None, # type: Optional[str] enable_tracing=None, # type: Optional[bool] diff --git a/sentry_sdk/hub.py b/sentry_sdk/hub.py index d514c168fa..2341214882 100644 --- a/sentry_sdk/hub.py +++ b/sentry_sdk/hub.py @@ -1,8 +1,11 @@ import warnings + +# Importing sentry_sdk.consts here prevents a circular import, even though it's not used in this file. +import sentry_sdk.consts # noqa: F401 + from contextlib import contextmanager from sentry_sdk._compat import with_metaclass -from sentry_sdk.consts import INSTRUMENTER from sentry_sdk.scope import Scope, _ScopeManager from sentry_sdk.client import Client from sentry_sdk.tracing import ( @@ -394,8 +397,8 @@ def add_breadcrumb(self, crumb=None, hint=None, **kwargs): """ Scope.get_isolation_scope().add_breadcrumb(crumb, hint, **kwargs) - def start_span(self, instrumenter=INSTRUMENTER.SENTRY, **kwargs): - # type: (str, Any) -> Span + def start_span(self, **kwargs): + # type: (Any) -> Span """ .. deprecated:: 2.0.0 This function is deprecated and will be removed in a future release. @@ -416,16 +419,12 @@ def start_span(self, instrumenter=INSTRUMENTER.SENTRY, **kwargs): For supported `**kwargs` see :py:class:`sentry_sdk.tracing.Span`. """ scope = Scope.get_current_scope() - return scope.start_span(instrumenter=instrumenter, **kwargs) + return scope.start_span(**kwargs) def start_transaction( - self, - transaction=None, - instrumenter=INSTRUMENTER.SENTRY, - custom_sampling_context=None, - **kwargs + self, transaction=None, custom_sampling_context=None, **kwargs ): - # type: (Optional[Transaction], str, Optional[SamplingContext], Unpack[TransactionKwargs]) -> Union[Transaction, NoOpSpan] + # type: (Optional[Transaction], Optional[SamplingContext], Unpack[TransactionKwargs]) -> Union[Transaction, NoOpSpan] """ .. 
deprecated:: 2.0.0 This function is deprecated and will be removed in a future release. @@ -461,9 +460,7 @@ def start_transaction( # Type checking disabled for this line because deprecated keys are not allowed in the type signature. kwargs["hub"] = scope # type: ignore - return scope.start_transaction( - transaction, instrumenter, custom_sampling_context, **kwargs - ) + return scope.start_transaction(transaction, custom_sampling_context, **kwargs) def continue_trace(self, environ_or_headers, op=None, name=None, source=None): # type: (Dict[str, Any], Optional[str], Optional[str], Optional[str]) -> Transaction diff --git a/sentry_sdk/integrations/opentelemetry/span_processor.py b/sentry_sdk/integrations/opentelemetry/span_processor.py index b5dec4f6be..b45c806051 100644 --- a/sentry_sdk/integrations/opentelemetry/span_processor.py +++ b/sentry_sdk/integrations/opentelemetry/span_processor.py @@ -14,7 +14,7 @@ INVALID_TRACE_ID, ) from sentry_sdk import get_client, start_transaction -from sentry_sdk.consts import INSTRUMENTER, SPANSTATUS +from sentry_sdk.consts import SPANSTATUS from sentry_sdk.integrations.opentelemetry.consts import ( SENTRY_BAGGAGE_KEY, SENTRY_TRACE_KEY, @@ -40,11 +40,6 @@ def link_trace_context_to_error_event(event, otel_span_map): # type: (Event, dict[str, Union[Transaction, SentrySpan]]) -> Event - client = get_client() - - if client.options["instrumenter"] != INSTRUMENTER.OTEL: - return event - if hasattr(event, "type") and event["type"] == "transaction": return event @@ -117,9 +112,6 @@ def on_start(self, otel_span, parent_context=None): if not client.dsn: return - if client.options["instrumenter"] != INSTRUMENTER.OTEL: - return - if not otel_span.get_span_context().is_valid: return @@ -145,7 +137,6 @@ def on_start(self, otel_span, parent_context=None): span_id=trace_data["span_id"], description=otel_span.name, start_timestamp=start_timestamp, - instrumenter=INSTRUMENTER.OTEL, origin=SPAN_ORIGIN, ) else: @@ -156,7 +147,6 @@ def on_start(self, otel_span, parent_context=None): trace_id=trace_data["trace_id"], baggage=trace_data["baggage"], start_timestamp=start_timestamp, - instrumenter=INSTRUMENTER.OTEL, origin=SPAN_ORIGIN, ) @@ -174,11 +164,6 @@ def on_start(self, otel_span, parent_context=None): def on_end(self, otel_span): # type: (OTelSpan) -> None - client = get_client() - - if client.options["instrumenter"] != INSTRUMENTER.OTEL: - return - span_context = otel_span.get_span_context() if not span_context.is_valid: return diff --git a/sentry_sdk/scope.py b/sentry_sdk/scope.py index 1febbd0ef2..233dfa25f2 100644 --- a/sentry_sdk/scope.py +++ b/sentry_sdk/scope.py @@ -9,7 +9,7 @@ from itertools import chain from sentry_sdk.attachments import Attachment -from sentry_sdk.consts import DEFAULT_MAX_BREADCRUMBS, FALSE_VALUES, INSTRUMENTER +from sentry_sdk.consts import DEFAULT_MAX_BREADCRUMBS, FALSE_VALUES from sentry_sdk.profiler.continuous_profiler import try_autostart_continuous_profiler from sentry_sdk.profiler.transaction_profiler import Profile from sentry_sdk.session import Session @@ -956,13 +956,9 @@ def add_breadcrumb(self, crumb=None, hint=None, **kwargs): self._breadcrumbs.popleft() def start_transaction( - self, - transaction=None, - instrumenter=INSTRUMENTER.SENTRY, - custom_sampling_context=None, - **kwargs + self, transaction=None, custom_sampling_context=None, **kwargs ): - # type: (Optional[Transaction], str, Optional[SamplingContext], Unpack[TransactionKwargs]) -> Union[Transaction, NoOpSpan] + # type: (Optional[Transaction], Optional[SamplingContext], 
Unpack[TransactionKwargs]) -> Union[Transaction, NoOpSpan] """ Start and return a transaction. @@ -987,8 +983,6 @@ def start_transaction( :param transaction: The transaction to start. If omitted, we create and start a new transaction. - :param instrumenter: This parameter is meant for internal use only. It - will be removed in the next major version. :param custom_sampling_context: The transaction's custom sampling context. :param kwargs: Optional keyword arguments to be passed to the Transaction constructor. See :py:class:`sentry_sdk.tracing.Transaction` for @@ -998,11 +992,6 @@ def start_transaction( client = Scope.get_client() - configuration_instrumenter = client.options["instrumenter"] - - if instrumenter != configuration_instrumenter: - return NoOpSpan() - try_autostart_continuous_profiler() custom_sampling_context = custom_sampling_context or {} @@ -1039,8 +1028,8 @@ def start_transaction( return transaction - def start_span(self, instrumenter=INSTRUMENTER.SENTRY, **kwargs): - # type: (str, Any) -> Span + def start_span(self, **kwargs): + # type: (Any) -> Span """ Start a span whose parent is the currently active span or transaction, if any. @@ -1063,13 +1052,6 @@ def start_span(self, instrumenter=INSTRUMENTER.SENTRY, **kwargs): with new_scope(): kwargs.setdefault("scope", self) - client = Scope.get_client() - - configuration_instrumenter = client.options["instrumenter"] - - if instrumenter != configuration_instrumenter: - return NoOpSpan() - # get current span or transaction span = self.span or Scope.get_isolation_scope().span diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 8e74707608..03ae08d23d 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -4,7 +4,7 @@ from datetime import datetime, timedelta, timezone import sentry_sdk -from sentry_sdk.consts import INSTRUMENTER, SPANSTATUS, SPANDATA +from sentry_sdk.consts import SPANSTATUS, SPANDATA from sentry_sdk.profiler.continuous_profiler import get_profiler_id from sentry_sdk.utils import ( get_current_thread_meta, @@ -386,8 +386,8 @@ def containing_transaction(self): # referencing themselves) return self._containing_transaction - def start_child(self, instrumenter=INSTRUMENTER.SENTRY, **kwargs): - # type: (str, **Any) -> Span + def start_child(self, **kwargs): + # type: (**Any) -> Span """ Start a sub-span from the current span or transaction. @@ -399,13 +399,6 @@ def start_child(self, instrumenter=INSTRUMENTER.SENTRY, **kwargs): be removed in the next major version. Going forward, it should only be used by the SDK itself. 
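        A hedged example of creating a child span from an existing transaction
        (the op, description, and data values are illustrative):

            import sentry_sdk

            transaction = sentry_sdk.start_transaction(name="task")
            with transaction.start_child(op="child_task", description="sub-step") as child:
                child.set_data("batch.size", 42)
            transaction.finish()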
""" - configuration_instrumenter = sentry_sdk.Scope.get_client().options[ - "instrumenter" - ] - - if instrumenter != configuration_instrumenter: - return NoOpSpan() - kwargs.setdefault("sampled", self.sampled) child = Span( @@ -1157,8 +1150,8 @@ def containing_transaction(self): # type: () -> Optional[Transaction] return None - def start_child(self, instrumenter=INSTRUMENTER.SENTRY, **kwargs): - # type: (str, **Any) -> NoOpSpan + def start_child(self, **kwargs): + # type: (**Any) -> NoOpSpan return NoOpSpan() def to_traceparent(self): diff --git a/tests/integrations/opentelemetry/test_span_processor.py b/tests/integrations/opentelemetry/test_span_processor.py index cc52735214..1df014fc7e 100644 --- a/tests/integrations/opentelemetry/test_span_processor.py +++ b/tests/integrations/opentelemetry/test_span_processor.py @@ -22,7 +22,6 @@ def test_is_sentry_span(): assert not is_sentry_span(otel_span) client = MagicMock() - client.options = {"instrumenter": "otel"} client.dsn = "https://1234567890abcdef@o123456.ingest.sentry.io/123456" Scope.get_global_scope().set_client(client) @@ -305,7 +304,6 @@ def test_on_start_transaction(): fake_start_transaction = MagicMock() fake_client = MagicMock() - fake_client.options = {"instrumenter": "otel"} fake_client.dsn = "https://1234567890abcdef@o123456.ingest.sentry.io/123456" Scope.get_global_scope().set_client(fake_client) @@ -325,7 +323,6 @@ def test_on_start_transaction(): start_timestamp=datetime.fromtimestamp( otel_span.start_time / 1e9, timezone.utc ), - instrumenter="otel", origin="auto.otel", ) @@ -349,7 +346,6 @@ def test_on_start_child(): parent_context = {} fake_client = MagicMock() - fake_client.options = {"instrumenter": "otel"} fake_client.dsn = "https://1234567890abcdef@o123456.ingest.sentry.io/123456" Scope.get_global_scope().set_client(fake_client) @@ -365,7 +361,6 @@ def test_on_start_child(): start_timestamp=datetime.fromtimestamp( otel_span.start_time / 1e9, timezone.utc ), - instrumenter="otel", origin="auto.otel", ) @@ -415,7 +410,6 @@ def test_on_end_sentry_transaction(): otel_span.get_span_context.return_value = span_context fake_client = MagicMock() - fake_client.options = {"instrumenter": "otel"} Scope.get_global_scope().set_client(fake_client) fake_sentry_span = MagicMock(spec=Transaction) @@ -451,7 +445,6 @@ def test_on_end_sentry_span(): otel_span.get_span_context.return_value = span_context fake_client = MagicMock() - fake_client.options = {"instrumenter": "otel"} Scope.get_global_scope().set_client(fake_client) fake_sentry_span = MagicMock(spec=Span) @@ -478,7 +471,6 @@ def test_link_trace_context_to_error_event(): Test that the trace context is added to the error event. 
""" fake_client = MagicMock() - fake_client.options = {"instrumenter": "otel"} Scope.get_global_scope().set_client(fake_client) span_id = "1234567890abcdef" @@ -535,7 +527,7 @@ def test_pruning_old_spans_on_start(): parent_context = {} fake_client = MagicMock() - fake_client.options = {"instrumenter": "otel", "debug": False} + fake_client.options = {"debug": False} fake_client.dsn = "https://1234567890abcdef@o123456.ingest.sentry.io/123456" Scope.get_global_scope().set_client(fake_client) @@ -578,7 +570,6 @@ def test_pruning_old_spans_on_end(): otel_span.parent.span_id = int("abcdef1234567890", 16) fake_client = MagicMock() - fake_client.options = {"instrumenter": "otel"} Scope.get_global_scope().set_client(fake_client) fake_sentry_span = MagicMock(spec=Span) diff --git a/tests/tracing/test_noop_span.py b/tests/tracing/test_noop_span.py deleted file mode 100644 index c9aad60590..0000000000 --- a/tests/tracing/test_noop_span.py +++ /dev/null @@ -1,52 +0,0 @@ -import sentry_sdk -from sentry_sdk.tracing import NoOpSpan - -# These tests make sure that the examples from the documentation [1] -# are working when OTel (OpenTelemetry) instrumentation is turned on, -# and therefore, the Sentry tracing should not do anything. -# -# 1: https://docs.sentry.io/platforms/python/performance/instrumentation/custom-instrumentation/ - - -def test_noop_start_transaction(sentry_init): - sentry_init(instrumenter="otel") - - with sentry_sdk.start_transaction( - op="task", name="test_transaction_name" - ) as transaction: - assert isinstance(transaction, NoOpSpan) - assert sentry_sdk.Scope.get_current_scope().span is transaction - - transaction.name = "new name" - - -def test_noop_start_span(sentry_init): - sentry_init(instrumenter="otel") - - with sentry_sdk.start_span(op="http", description="GET /") as span: - assert isinstance(span, NoOpSpan) - assert sentry_sdk.Scope.get_current_scope().span is span - - span.set_tag("http.response.status_code", 418) - span.set_data("http.entity_type", "teapot") - - -def test_noop_transaction_start_child(sentry_init): - sentry_init(instrumenter="otel") - - transaction = sentry_sdk.start_transaction(name="task") - assert isinstance(transaction, NoOpSpan) - - with transaction.start_child(op="child_task") as child: - assert isinstance(child, NoOpSpan) - assert sentry_sdk.Scope.get_current_scope().span is child - - -def test_noop_span_start_child(sentry_init): - sentry_init(instrumenter="otel") - span = sentry_sdk.start_span(name="task") - assert isinstance(span, NoOpSpan) - - with span.start_child(op="child_task") as child: - assert isinstance(child, NoOpSpan) - assert sentry_sdk.Scope.get_current_scope().span is child From 980b2684657cb703f2412832ac4159ab14a66077 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Fri, 26 Jul 2024 16:14:53 +0200 Subject: [PATCH 006/244] Span description, op, and status improvements (#3259) Improved extraction of op, description, status and http_status for a Sentry span from an OpenTelemenetry span. 
Fixes #3236 --------- Co-authored-by: Neel Shah Co-authored-by: Ivana Kellyerova --- .../opentelemetry/potel_span_processor.py | 8 +- .../opentelemetry/span_processor.py | 35 +- .../integrations/opentelemetry/utils.py | 206 ++++++++-- .../opentelemetry/test_span_processor.py | 29 +- .../integrations/opentelemetry/test_utils.py | 362 ++++++++++++++++++ 5 files changed, 545 insertions(+), 95 deletions(-) create mode 100644 tests/integrations/opentelemetry/test_utils.py diff --git a/sentry_sdk/integrations/opentelemetry/potel_span_processor.py b/sentry_sdk/integrations/opentelemetry/potel_span_processor.py index faa583a18d..9604676dce 100644 --- a/sentry_sdk/integrations/opentelemetry/potel_span_processor.py +++ b/sentry_sdk/integrations/opentelemetry/potel_span_processor.py @@ -116,14 +116,14 @@ def _root_span_to_transaction_event(self, span): span_id = format_span_id(span.context.span_id) parent_span_id = format_span_id(span.parent.span_id) if span.parent else None - (op, description, _) = extract_span_data(span) + (op, description, status, _) = extract_span_data(span) trace_context = { "trace_id": trace_id, "span_id": span_id, "origin": SPAN_ORIGIN, "op": op, - "status": "ok", # TODO-neel-potel span status mapping + "status": status, } # type: dict[str, Any] if parent_span_id: @@ -160,7 +160,7 @@ def _span_to_json(self, span): span_id = format_span_id(span.context.span_id) parent_span_id = format_span_id(span.parent.span_id) if span.parent else None - (op, description, _) = extract_span_data(span) + (op, description, status, _) = extract_span_data(span) span_json = { "trace_id": trace_id, @@ -168,7 +168,7 @@ def _span_to_json(self, span): "origin": SPAN_ORIGIN, "op": op, "description": description, - "status": "ok", # TODO-neel-potel span status mapping + "status": status, "start_timestamp": convert_otel_timestamp(span.start_time), "timestamp": convert_otel_timestamp(span.end_time), } # type: dict[str, Any] diff --git a/sentry_sdk/integrations/opentelemetry/span_processor.py b/sentry_sdk/integrations/opentelemetry/span_processor.py index b45c806051..594ccbb71f 100644 --- a/sentry_sdk/integrations/opentelemetry/span_processor.py +++ b/sentry_sdk/integrations/opentelemetry/span_processor.py @@ -14,7 +14,6 @@ INVALID_TRACE_ID, ) from sentry_sdk import get_client, start_transaction -from sentry_sdk.consts import SPANSTATUS from sentry_sdk.integrations.opentelemetry.consts import ( SENTRY_BAGGAGE_KEY, SENTRY_TRACE_KEY, @@ -175,8 +174,6 @@ def on_end(self, otel_span): sentry_span.op = otel_span.name - self._update_span_with_otel_status(sentry_span, otel_span) - if isinstance(sentry_span, Transaction): sentry_span.name = otel_span.name sentry_span.set_context( @@ -249,20 +246,6 @@ def _get_trace_data(self, otel_span, parent_context): return trace_data - def _update_span_with_otel_status(self, sentry_span, otel_span): - # type: (SentrySpan, OTelSpan) -> None - """ - Set the Sentry span status from the OTel span - """ - if otel_span.status.is_unset: - return - - if otel_span.status.is_ok: - sentry_span.set_status(SPANSTATUS.OK) - return - - sentry_span.set_status(SPANSTATUS.INTERNAL_ERROR) - def _update_span_with_otel_data(self, sentry_span, otel_span): # type: (SentrySpan, OTelSpan) -> None """ @@ -275,15 +258,21 @@ def _update_span_with_otel_data(self, sentry_span, otel_span): for key, val in otel_span.attributes.items(): sentry_span.set_data(key, val) - (op, description, status_code) = extract_span_data(otel_span) + (op, description, status, http_status) = extract_span_data(otel_span) 
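        # Note (assumption based on Span.set_http_status): setting an HTTP status
        # below also derives and sets the matching span status in addition to
        # recording the code itself, so the explicit `status` is only applied as
        # a fallback when no HTTP status code is available.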
sentry_span.op = op sentry_span.description = description - if status_code: - sentry_span.set_http_status(status_code) + + if http_status: + sentry_span.set_http_status(http_status) + elif status: + sentry_span.set_status(status) def _update_transaction_with_otel_data(self, sentry_span, otel_span): # type: (SentrySpan, OTelSpan) -> None - (op, _, status_code) = extract_span_data(otel_span) + (op, _, status, http_status) = extract_span_data(otel_span) sentry_span.op = op - if status_code: - sentry_span.set_http_status(status_code) + + if http_status: + sentry_span.set_http_status(http_status) + elif status: + sentry_span.set_status(status) diff --git a/sentry_sdk/integrations/opentelemetry/utils.py b/sentry_sdk/integrations/opentelemetry/utils.py index 1dc77ab150..cb04dd8e1a 100644 --- a/sentry_sdk/integrations/opentelemetry/utils.py +++ b/sentry_sdk/integrations/opentelemetry/utils.py @@ -1,9 +1,11 @@ from typing import cast from datetime import datetime, timezone -from opentelemetry.trace import SpanKind +from opentelemetry.trace import SpanKind, StatusCode from opentelemetry.semconv.trace import SpanAttributes from opentelemetry.sdk.trace import ReadableSpan +from sentry_sdk.consts import SPANSTATUS +from sentry_sdk.tracing import get_span_status_from_http_code from urllib3.util import parse_url as urlparse from sentry_sdk import get_client @@ -12,7 +14,27 @@ from sentry_sdk._types import TYPE_CHECKING if TYPE_CHECKING: - from typing import Optional, Tuple + from typing import Optional, Mapping, Sequence + + +GRPC_ERROR_MAP = { + "1": SPANSTATUS.CANCELLED, + "2": SPANSTATUS.UNKNOWN_ERROR, + "3": SPANSTATUS.INVALID_ARGUMENT, + "4": SPANSTATUS.DEADLINE_EXCEEDED, + "5": SPANSTATUS.NOT_FOUND, + "6": SPANSTATUS.ALREADY_EXISTS, + "7": SPANSTATUS.PERMISSION_DENIED, + "8": SPANSTATUS.RESOURCE_EXHAUSTED, + "9": SPANSTATUS.FAILED_PRECONDITION, + "10": SPANSTATUS.ABORTED, + "11": SPANSTATUS.OUT_OF_RANGE, + "12": SPANSTATUS.UNIMPLEMENTED, + "13": SPANSTATUS.INTERNAL_ERROR, + "14": SPANSTATUS.UNAVAILABLE, + "15": SPANSTATUS.DATA_LOSS, + "16": SPANSTATUS.UNAUTHENTICATED, +} def is_sentry_span(span): @@ -54,52 +76,152 @@ def convert_otel_timestamp(time): def extract_span_data(span): - # type: (ReadableSpan) -> Tuple[str, str, Optional[int]] + # type: (ReadableSpan) -> tuple[str, str, Optional[str], Optional[int]] op = span.name description = span.name - status_code = None + status, http_status = extract_span_status(span) if span.attributes is None: - return (op, description, status_code) + return (op, description, status, http_status) http_method = span.attributes.get(SpanAttributes.HTTP_METHOD) http_method = cast("Optional[str]", http_method) + if http_method: + return span_data_for_http_method(span) + db_query = span.attributes.get(SpanAttributes.DB_SYSTEM) + if db_query: + return span_data_for_db_query(span) - if http_method: - op = "http" - if span.kind == SpanKind.SERVER: - op += ".server" - elif span.kind == SpanKind.CLIENT: - op += ".client" - - description = http_method - - peer_name = span.attributes.get(SpanAttributes.NET_PEER_NAME, None) - if peer_name: - description += " {}".format(peer_name) - - target = span.attributes.get(SpanAttributes.HTTP_TARGET, None) - if target: - description += " {}".format(target) - - if not peer_name and not target: - url = span.attributes.get(SpanAttributes.HTTP_URL, None) - url = cast("Optional[str]", url) - if url: - parsed_url = urlparse(url) - url = "{}://{}{}".format( - parsed_url.scheme, parsed_url.netloc, parsed_url.path - ) - description += " 
{}".format(url) - - status_code = span.attributes.get(SpanAttributes.HTTP_STATUS_CODE) - elif db_query: - op = "db" - statement = span.attributes.get(SpanAttributes.DB_STATEMENT, None) - statement = cast("Optional[str]", statement) - if statement: - description = statement - - status_code = cast("Optional[int]", status_code) - return (op, description, status_code) + rpc_service = span.attributes.get(SpanAttributes.RPC_SERVICE) + if rpc_service: + return ("rpc", description, status, http_status) + + messaging_system = span.attributes.get(SpanAttributes.MESSAGING_SYSTEM) + if messaging_system: + return ("message", description, status, http_status) + + faas_trigger = span.attributes.get(SpanAttributes.FAAS_TRIGGER) + if faas_trigger: + return ( + str(faas_trigger), + description, + status, + http_status, + ) + + return (op, description, status, http_status) + + +def span_data_for_http_method(span): + # type: (ReadableSpan) -> tuple[str, str, Optional[str], Optional[int]] + span_attributes = span.attributes or {} + + op = "http" + + if span.kind == SpanKind.SERVER: + op += ".server" + elif span.kind == SpanKind.CLIENT: + op += ".client" + + http_method = span_attributes.get(SpanAttributes.HTTP_METHOD) + route = span_attributes.get(SpanAttributes.HTTP_ROUTE) + target = span_attributes.get(SpanAttributes.HTTP_TARGET) + peer_name = span_attributes.get(SpanAttributes.NET_PEER_NAME) + + description = f"{http_method}" + + if route: + description = f"{http_method} {route}" + elif target: + description = f"{http_method} {target}" + elif peer_name: + description = f"{http_method} {peer_name}" + else: + url = span_attributes.get(SpanAttributes.HTTP_URL) + url = cast("Optional[str]", url) + + if url: + parsed_url = urlparse(url) + url = "{}://{}{}".format( + parsed_url.scheme, parsed_url.netloc, parsed_url.path + ) + description = f"{http_method} {url}" + + status, http_status = extract_span_status(span) + + return (op, description, status, http_status) + + +def span_data_for_db_query(span): + # type: (ReadableSpan) -> tuple[str, str, Optional[str], Optional[int]] + span_attributes = span.attributes or {} + + op = "db" + + statement = span_attributes.get(SpanAttributes.DB_STATEMENT, None) + statement = cast("Optional[str]", statement) + + description = statement or span.name + + return (op, description, None, None) + + +def extract_span_status(span): + # type: (ReadableSpan) -> tuple[Optional[str], Optional[int]] + span_attributes = span.attributes or {} + status = span.status or None + + if status: + inferred_status, http_status = infer_status_from_attributes(span_attributes) + + if status.status_code == StatusCode.OK: + return (SPANSTATUS.OK, http_status) + elif status.status_code == StatusCode.ERROR: + if status.description is None: + if inferred_status: + return (inferred_status, http_status) + + if ( + status.description is not None + and status.description in GRPC_ERROR_MAP.values() + ): + return (status.description, None) + else: + return (SPANSTATUS.UNKNOWN_ERROR, None) + + inferred_status, http_status = infer_status_from_attributes(span_attributes) + if inferred_status: + return (inferred_status, http_status) + + if status and status.status_code == StatusCode.UNSET: + return (SPANSTATUS.OK, None) + else: + return (SPANSTATUS.UNKNOWN_ERROR, None) + + +def infer_status_from_attributes(span_attributes): + # type: (Mapping[str, str | bool | int | float | Sequence[str] | Sequence[bool] | Sequence[int] | Sequence[float]]) -> tuple[Optional[str], Optional[int]] + http_status = 
get_http_status_code(span_attributes) + + if http_status: + return (get_span_status_from_http_code(http_status), http_status) + + grpc_status = span_attributes.get(SpanAttributes.RPC_GRPC_STATUS_CODE) + if grpc_status: + return (GRPC_ERROR_MAP.get(str(grpc_status), SPANSTATUS.UNKNOWN_ERROR), None) + + return (None, None) + + +def get_http_status_code(span_attributes): + # type: (Mapping[str, str | bool | int | float | Sequence[str] | Sequence[bool] | Sequence[int] | Sequence[float]]) -> Optional[int] + http_status = span_attributes.get(SpanAttributes.HTTP_RESPONSE_STATUS_CODE) + + if http_status is None: + # Fall back to the deprecated attribute + http_status = span_attributes.get(SpanAttributes.HTTP_STATUS_CODE) + + http_status = cast("Optional[int]", http_status) + + return http_status diff --git a/tests/integrations/opentelemetry/test_span_processor.py b/tests/integrations/opentelemetry/test_span_processor.py index 1df014fc7e..12b1e91ea4 100644 --- a/tests/integrations/opentelemetry/test_span_processor.py +++ b/tests/integrations/opentelemetry/test_span_processor.py @@ -3,7 +3,6 @@ from unittest import mock from unittest.mock import MagicMock -import pytest from opentelemetry.trace import SpanKind, SpanContext, Status, StatusCode from sentry_sdk.integrations.opentelemetry.span_processor import ( @@ -200,7 +199,7 @@ def test_update_span_with_otel_data_http_method(): span_processor._update_span_with_otel_data(sentry_span, otel_span) assert sentry_span.op == "http.client" - assert sentry_span.description == "GET example.com /" + assert sentry_span.description == "GET /" assert sentry_span.status == "resource_exhausted" assert sentry_span._data["http.method"] == "GET" @@ -211,28 +210,6 @@ def test_update_span_with_otel_data_http_method(): assert sentry_span._data["http.target"] == "/" -@pytest.mark.parametrize( - "otel_status, expected_status", - [ - pytest.param(Status(StatusCode.UNSET), None, id="unset"), - pytest.param(Status(StatusCode.OK), "ok", id="ok"), - pytest.param(Status(StatusCode.ERROR), "internal_error", id="error"), - ], -) -def test_update_span_with_otel_status(otel_status, expected_status): - sentry_span = Span() - - otel_span = MagicMock() - otel_span.name = "Test OTel Span" - otel_span.kind = SpanKind.INTERNAL - otel_span.status = otel_status - - span_processor = SentrySpanProcessor() - span_processor._update_span_with_otel_status(sentry_span, otel_span) - - assert sentry_span.get_trace_context().get("status") == expected_status - - def test_update_span_with_otel_data_http_method2(): sentry_span = Span() @@ -419,13 +396,14 @@ def test_on_end_sentry_transaction(): span_processor = SentrySpanProcessor() span_processor._get_otel_context = MagicMock() span_processor._update_span_with_otel_data = MagicMock() + span_processor._update_transaction_with_otel_data = MagicMock() span_processor.otel_span_map["1234567890abcdef"] = fake_sentry_span span_processor.on_end(otel_span) fake_sentry_span.set_context.assert_called_once() span_processor._update_span_with_otel_data.assert_not_called() - fake_sentry_span.set_status.assert_called_once_with("ok") + span_processor._update_transaction_with_otel_data.assert_called_once() fake_sentry_span.finish.assert_called_once() @@ -462,7 +440,6 @@ def test_on_end_sentry_span(): span_processor._update_span_with_otel_data.assert_called_once_with( fake_sentry_span, otel_span ) - fake_sentry_span.set_status.assert_called_once_with("ok") fake_sentry_span.finish.assert_called_once() diff --git a/tests/integrations/opentelemetry/test_utils.py 
b/tests/integrations/opentelemetry/test_utils.py new file mode 100644 index 0000000000..ceb58a58ef --- /dev/null +++ b/tests/integrations/opentelemetry/test_utils.py @@ -0,0 +1,362 @@ +from unittest.mock import MagicMock + +import pytest +from opentelemetry.trace import SpanKind, Status, StatusCode + +from sentry_sdk.integrations.opentelemetry.utils import ( + extract_span_data, + extract_span_status, + span_data_for_db_query, + span_data_for_http_method, +) + + +@pytest.mark.parametrize( + "name, status, attributes, expected", + [ + ( + "OTel Span Blank", + Status(StatusCode.UNSET), # Unset defaults to OK + {}, + { + "op": "OTel Span Blank", + "description": "OTel Span Blank", + "status": "ok", + "http_status_code": None, + }, + ), + ( + "OTel Span RPC", + Status(StatusCode.UNSET), # Unset defaults to OK + { + "rpc.service": "myservice.EchoService", + }, + { + "op": "rpc", + "description": "OTel Span RPC", + "status": "ok", + "http_status_code": None, + }, + ), + ( + "OTel Span Messaging", + Status(StatusCode.UNSET), # Unset defaults to OK + { + "messaging.system": "rabbitmq", + }, + { + "op": "message", + "description": "OTel Span Messaging", + "status": "ok", + "http_status_code": None, + }, + ), + ( + "OTel Span FaaS", + Status(StatusCode.UNSET), # Unset defaults to OK + { + "faas.trigger": "pubsub", + }, + { + "op": "pubsub", + "description": "OTel Span FaaS", + "status": "ok", + "http_status_code": None, + }, + ), + ], +) +def test_extract_span_data(name, status, attributes, expected): + otel_span = MagicMock() + otel_span.name = name + otel_span.status = Status(StatusCode.UNSET) + otel_span.attributes = attributes + + op, description, status, http_status_code = extract_span_data(otel_span) + result = { + "op": op, + "description": description, + "status": status, + "http_status_code": http_status_code, + } + assert result == expected + + +@pytest.mark.parametrize( + "kind, status, attributes, expected", + [ + ( + SpanKind.CLIENT, + Status(StatusCode.OK), + { + "http.method": "GET", + "http.target": None, # no location for description + "net.peer.name": None, + "http.url": None, + }, + { + "op": "http.client", + "description": "GET", + "status": "ok", + "http_status_code": None, + }, + ), + ( + SpanKind.CLIENT, + Status(StatusCode.OK), + { + "http.method": "GET", + "http.target": "/target", # this can be the location in the description + }, + { + "op": "http.client", + "description": "GET /target", + "status": "ok", + "http_status_code": None, + }, + ), + ( + SpanKind.CLIENT, + Status(StatusCode.OK), + { + "http.method": "GET", + "net.peer.name": "example.com", # this can be the location in the description + }, + { + "op": "http.client", + "description": "GET example.com", + "status": "ok", + "http_status_code": None, + }, + ), + ( + SpanKind.CLIENT, + Status(StatusCode.OK), + { + "http.method": "GET", + "http.target": "/target", # target takes precedence over net.peer.name + "net.peer.name": "example.com", + }, + { + "op": "http.client", + "description": "GET /target", + "status": "ok", + "http_status_code": None, + }, + ), + ( + SpanKind.CLIENT, + Status(StatusCode.OK), + { + "http.method": "GET", + "http.url": "https://username:secretpwd@example.com/bla/?secret=123&anothersecret=456", # sensitive data is stripped + }, + { + "op": "http.client", + "description": "GET https://example.com/bla/", + "status": "ok", + "http_status_code": None, + }, + ), + ], +) +def test_span_data_for_http_method(kind, status, attributes, expected): + otel_span = MagicMock() + otel_span.kind = kind + 
otel_span.status = status + otel_span.attributes = attributes + + op, description, status, http_status_code = span_data_for_http_method(otel_span) + result = { + "op": op, + "description": description, + "status": status, + "http_status_code": http_status_code, + } + assert result == expected + + +def test_span_data_for_db_query(): + otel_span = MagicMock() + otel_span.name = "OTel Span" + otel_span.attributes = {} + + op, description, status, http_status = span_data_for_db_query(otel_span) + assert op == "db" + assert description == "OTel Span" + assert status is None + assert http_status is None + + otel_span.attributes = {"db.statement": "SELECT * FROM table;"} + + op, description, status, http_status = span_data_for_db_query(otel_span) + assert op == "db" + assert description == "SELECT * FROM table;" + assert status is None + assert http_status is None + + +@pytest.mark.parametrize( + "kind, status, attributes, expected", + [ + ( + SpanKind.CLIENT, + None, # None means unknown error + { + "http.method": "POST", + "http.route": "/some/route", + }, + { + "status": "unknown_error", + "http_status_code": None, + }, + ), + ( + SpanKind.CLIENT, + None, + { + "http.method": "POST", + "http.route": "/some/route", + "http.status_code": 502, # Take this status in case of None status + }, + { + "status": "internal_error", + "http_status_code": 502, + }, + ), + ( + SpanKind.SERVER, + Status(StatusCode.UNSET), # Unset defaults to OK + { + "http.method": "POST", + "http.route": "/some/route", + }, + { + "status": "ok", + "http_status_code": None, + }, + ), + ( + SpanKind.SERVER, + Status(StatusCode.UNSET), + { + "http.method": "POST", + "http.route": "/some/route", + "http.status_code": 502, # Take this status in case of UNSET status + }, + { + "status": "internal_error", + "http_status_code": 502, + }, + ), + ( + SpanKind.SERVER, + None, + { + "http.method": "POST", + "http.route": "/some/route", + "http.status_code": 502, + "http.response.status_code": 503, # this takes precedence over deprecated http.status_code + }, + { + "status": "unavailable", + "http_status_code": 503, + }, + ), + ( + SpanKind.SERVER, + Status(StatusCode.UNSET), + { + "http.method": "POST", + "http.route": "/some/route", + "http.status_code": 502, + "http.response.status_code": 503, # this takes precedence over deprecated http.status_code + }, + { + "status": "unavailable", + "http_status_code": 503, + }, + ), + ( + SpanKind.SERVER, + Status(StatusCode.OK), # OK status is taken right away + { + "http.method": "POST", + "http.route": "/some/route", + }, + { + "status": "ok", + "http_status_code": None, + }, + ), + ( + SpanKind.SERVER, + Status(StatusCode.OK), # OK status is taken right away + { + "http.method": "POST", + "http.route": "/some/route", + "http.response.status_code": 200, + }, + { + "status": "ok", + "http_status_code": 200, + }, + ), + ( + SpanKind.SERVER, + Status( + StatusCode.ERROR + ), # Error status without description gets the http status from attributes + { + "http.method": "POST", + "http.route": "/some/route", + "http.response.status_code": 401, + }, + { + "status": "unauthenticated", + "http_status_code": 401, + }, + ), + ( + SpanKind.SERVER, + Status( + StatusCode.ERROR, "I'm a teapot" + ), # Error status with unknown description is an unknown error + { + "http.method": "POST", + "http.route": "/some/route", + "http.response.status_code": 418, + }, + { + "status": "unknown_error", + "http_status_code": None, + }, + ), + ( + SpanKind.SERVER, + Status( + StatusCode.ERROR, "unimplemented" + ), # Error 
status with known description is taken (grpc errors) + { + "http.method": "POST", + "http.route": "/some/route", + }, + { + "status": "unimplemented", + "http_status_code": None, + }, + ), + ], +) +def test_extract_span_status(kind, status, attributes, expected): + otel_span = MagicMock() + otel_span.kind = kind + otel_span.status = status + otel_span.attributes = attributes + + status, http_status_code = extract_span_status(otel_span) + result = { + "status": status, + "http_status_code": http_status_code, + } + assert result == expected From 86daf735af9ac68d7fc79f40be2dd67604f86a06 Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Tue, 30 Jul 2024 14:05:28 +0200 Subject: [PATCH 007/244] Sync with master --- .github/workflows/test-integrations-ai.yml | 4 +- .../test-integrations-aws-lambda.yml | 2 +- .../test-integrations-cloud-computing.yml | 4 +- .../workflows/test-integrations-common.yml | 2 +- .../test-integrations-data-processing.yml | 4 +- .../workflows/test-integrations-databases.yml | 4 +- .../workflows/test-integrations-graphql.yml | 4 +- .../test-integrations-miscellaneous.yml | 4 +- .../test-integrations-networking.yml | 4 +- .../test-integrations-web-frameworks-1.yml | 4 +- .../test-integrations-web-frameworks-2.yml | 4 +- CHANGELOG.md | 74 +++++++-- MIGRATION_GUIDE.md | 10 +- checkouts/data-schemas | 2 +- constraints.txt | 3 + docs/conf.py | 2 +- pyproject.toml | 4 + .../templates/test_group.jinja | 5 +- sentry_sdk/__init__.py | 16 +- sentry_sdk/_init_implementation.py | 2 +- sentry_sdk/ai/monitoring.py | 38 ++++- sentry_sdk/api.py | 123 +++++++++----- sentry_sdk/attachments.py | 19 +++ sentry_sdk/client.py | 2 +- sentry_sdk/consts.py | 7 +- sentry_sdk/debug.py | 4 +- sentry_sdk/envelope.py | 4 +- sentry_sdk/hub.py | 73 ++++---- sentry_sdk/integrations/__init__.py | 45 +++-- sentry_sdk/integrations/aiohttp.py | 8 +- sentry_sdk/integrations/ariadne.py | 9 +- sentry_sdk/integrations/arq.py | 10 +- sentry_sdk/integrations/atexit.py | 3 +- sentry_sdk/integrations/aws_lambda.py | 4 +- sentry_sdk/integrations/bottle.py | 3 +- sentry_sdk/integrations/celery/__init__.py | 9 +- sentry_sdk/integrations/celery/beat.py | 3 +- sentry_sdk/integrations/django/__init__.py | 12 +- sentry_sdk/integrations/django/asgi.py | 5 +- sentry_sdk/integrations/django/templates.py | 3 +- sentry_sdk/integrations/django/views.py | 3 +- sentry_sdk/integrations/falcon.py | 3 +- sentry_sdk/integrations/fastapi.py | 10 +- sentry_sdk/integrations/flask.py | 10 +- sentry_sdk/integrations/gql.py | 4 +- sentry_sdk/integrations/graphene.py | 65 +++++++- sentry_sdk/integrations/grpc/aio/client.py | 6 +- sentry_sdk/integrations/grpc/client.py | 6 +- sentry_sdk/integrations/httpx.py | 5 +- sentry_sdk/integrations/huey.py | 6 +- .../integrations/opentelemetry/distro.py | 66 -------- .../integrations/opentelemetry/integration.py | 156 +++--------------- sentry_sdk/integrations/pyramid.py | 8 +- sentry_sdk/integrations/quart.py | 10 +- sentry_sdk/integrations/rq.py | 3 +- sentry_sdk/integrations/sanic.py | 5 +- sentry_sdk/integrations/spark/spark_driver.py | 3 +- sentry_sdk/integrations/spark/spark_worker.py | 3 +- sentry_sdk/integrations/starlette.py | 20 ++- sentry_sdk/integrations/starlite.py | 6 +- sentry_sdk/integrations/stdlib.py | 9 +- sentry_sdk/integrations/strawberry.py | 8 +- sentry_sdk/integrations/threading.py | 8 +- sentry_sdk/metrics.py | 2 +- sentry_sdk/profiler/transaction_profiler.py | 6 +- sentry_sdk/scope.py | 61 +++---- sentry_sdk/tracing.py | 10 +- sentry_sdk/tracing_utils.py | 4 +- 
sentry_sdk/utils.py | 10 +- setup.py | 58 +------ tests/conftest.py | 18 +- tests/integrations/celery/test_celery.py | 54 +++--- .../celery/test_update_celery_task_headers.py | 6 +- tests/integrations/django/myapp/views.py | 10 +- tests/integrations/django/test_basic.py | 10 +- tests/integrations/falcon/test_falcon.py | 9 +- tests/integrations/flask/test_flask.py | 11 +- tests/integrations/graphene/test_graphene.py | 80 +++++++++ tests/integrations/loguru/test_loguru.py | 4 +- .../opentelemetry/test_experimental.py | 76 --------- .../opentelemetry/test_span_processor.py | 18 +- tests/integrations/quart/test_quart.py | 10 +- tests/integrations/rq/test_rq.py | 4 +- tests/integrations/sanic/test_sanic.py | 8 +- .../sqlalchemy/test_sqlalchemy.py | 4 +- .../integrations/threading/test_threading.py | 3 +- tests/integrations/tornado/test_tornado.py | 12 +- tests/test_ai_monitoring.py | 121 ++++++++++++++ tests/test_api.py | 28 +++- tests/test_basics.py | 96 +++++++++-- tests/test_client.py | 63 +++---- tests/test_metrics.py | 7 +- tests/test_sessions.py | 26 +-- tests/test_transport.py | 35 ++-- tests/tracing/test_integration_tests.py | 8 +- tests/tracing/test_misc.py | 12 +- tests/tracing/test_sampling.py | 5 +- tox.ini | 5 +- 98 files changed, 1022 insertions(+), 837 deletions(-) create mode 100644 constraints.txt delete mode 100644 sentry_sdk/integrations/opentelemetry/distro.py create mode 100644 tests/test_ai_monitoring.py diff --git a/.github/workflows/test-integrations-ai.yml b/.github/workflows/test-integrations-ai.yml index 8ae5d2f36c..2039a00b35 100644 --- a/.github/workflows/test-integrations-ai.yml +++ b/.github/workflows/test-integrations-ai.yml @@ -39,7 +39,7 @@ jobs: allow-prereleases: true - name: Setup Test Env run: | - pip install coverage tox + pip install "coverage[toml]" tox - name: Erase coverage run: | coverage erase @@ -101,7 +101,7 @@ jobs: allow-prereleases: true - name: Setup Test Env run: | - pip install coverage tox + pip install "coverage[toml]" tox - name: Erase coverage run: | coverage erase diff --git a/.github/workflows/test-integrations-aws-lambda.yml b/.github/workflows/test-integrations-aws-lambda.yml index bb64224293..119545c9f6 100644 --- a/.github/workflows/test-integrations-aws-lambda.yml +++ b/.github/workflows/test-integrations-aws-lambda.yml @@ -74,7 +74,7 @@ jobs: allow-prereleases: true - name: Setup Test Env run: | - pip install coverage tox + pip install "coverage[toml]" tox - name: Erase coverage run: | coverage erase diff --git a/.github/workflows/test-integrations-cloud-computing.yml b/.github/workflows/test-integrations-cloud-computing.yml index 8588f0cf89..531303bf52 100644 --- a/.github/workflows/test-integrations-cloud-computing.yml +++ b/.github/workflows/test-integrations-cloud-computing.yml @@ -39,7 +39,7 @@ jobs: allow-prereleases: true - name: Setup Test Env run: | - pip install coverage tox + pip install "coverage[toml]" tox - name: Erase coverage run: | coverage erase @@ -97,7 +97,7 @@ jobs: allow-prereleases: true - name: Setup Test Env run: | - pip install coverage tox + pip install "coverage[toml]" tox - name: Erase coverage run: | coverage erase diff --git a/.github/workflows/test-integrations-common.yml b/.github/workflows/test-integrations-common.yml index 90dbd03dd3..a32f300512 100644 --- a/.github/workflows/test-integrations-common.yml +++ b/.github/workflows/test-integrations-common.yml @@ -39,7 +39,7 @@ jobs: allow-prereleases: true - name: Setup Test Env run: | - pip install coverage tox + pip install "coverage[toml]" tox - 
name: Erase coverage run: | coverage erase diff --git a/.github/workflows/test-integrations-data-processing.yml b/.github/workflows/test-integrations-data-processing.yml index 48a0e6acf9..1585adb20e 100644 --- a/.github/workflows/test-integrations-data-processing.yml +++ b/.github/workflows/test-integrations-data-processing.yml @@ -41,7 +41,7 @@ jobs: uses: supercharge/redis-github-action@1.8.0 - name: Setup Test Env run: | - pip install coverage tox + pip install "coverage[toml]" tox - name: Erase coverage run: | coverage erase @@ -109,7 +109,7 @@ jobs: uses: supercharge/redis-github-action@1.8.0 - name: Setup Test Env run: | - pip install coverage tox + pip install "coverage[toml]" tox - name: Erase coverage run: | coverage erase diff --git a/.github/workflows/test-integrations-databases.yml b/.github/workflows/test-integrations-databases.yml index 2ce8835310..c547e1a9da 100644 --- a/.github/workflows/test-integrations-databases.yml +++ b/.github/workflows/test-integrations-databases.yml @@ -58,7 +58,7 @@ jobs: - uses: getsentry/action-clickhouse-in-ci@v1 - name: Setup Test Env run: | - pip install coverage tox + pip install "coverage[toml]" tox - name: Erase coverage run: | coverage erase @@ -143,7 +143,7 @@ jobs: - uses: getsentry/action-clickhouse-in-ci@v1 - name: Setup Test Env run: | - pip install coverage tox + pip install "coverage[toml]" tox - name: Erase coverage run: | coverage erase diff --git a/.github/workflows/test-integrations-graphql.yml b/.github/workflows/test-integrations-graphql.yml index 57ca59ac76..d5f78aaa89 100644 --- a/.github/workflows/test-integrations-graphql.yml +++ b/.github/workflows/test-integrations-graphql.yml @@ -39,7 +39,7 @@ jobs: allow-prereleases: true - name: Setup Test Env run: | - pip install coverage tox + pip install "coverage[toml]" tox - name: Erase coverage run: | coverage erase @@ -97,7 +97,7 @@ jobs: allow-prereleases: true - name: Setup Test Env run: | - pip install coverage tox + pip install "coverage[toml]" tox - name: Erase coverage run: | coverage erase diff --git a/.github/workflows/test-integrations-miscellaneous.yml b/.github/workflows/test-integrations-miscellaneous.yml index 21b43e33f8..71ee0a2f1c 100644 --- a/.github/workflows/test-integrations-miscellaneous.yml +++ b/.github/workflows/test-integrations-miscellaneous.yml @@ -39,7 +39,7 @@ jobs: allow-prereleases: true - name: Setup Test Env run: | - pip install coverage tox + pip install "coverage[toml]" tox - name: Erase coverage run: | coverage erase @@ -101,7 +101,7 @@ jobs: allow-prereleases: true - name: Setup Test Env run: | - pip install coverage tox + pip install "coverage[toml]" tox - name: Erase coverage run: | coverage erase diff --git a/.github/workflows/test-integrations-networking.yml b/.github/workflows/test-integrations-networking.yml index 8490e34aa6..295f6bcffc 100644 --- a/.github/workflows/test-integrations-networking.yml +++ b/.github/workflows/test-integrations-networking.yml @@ -39,7 +39,7 @@ jobs: allow-prereleases: true - name: Setup Test Env run: | - pip install coverage tox + pip install "coverage[toml]" tox - name: Erase coverage run: | coverage erase @@ -97,7 +97,7 @@ jobs: allow-prereleases: true - name: Setup Test Env run: | - pip install coverage tox + pip install "coverage[toml]" tox - name: Erase coverage run: | coverage erase diff --git a/.github/workflows/test-integrations-web-frameworks-1.yml b/.github/workflows/test-integrations-web-frameworks-1.yml index 6b9bb703bd..835dd724b3 100644 --- 
a/.github/workflows/test-integrations-web-frameworks-1.yml +++ b/.github/workflows/test-integrations-web-frameworks-1.yml @@ -57,7 +57,7 @@ jobs: allow-prereleases: true - name: Setup Test Env run: | - pip install coverage tox + pip install "coverage[toml]" tox - name: Erase coverage run: | coverage erase @@ -133,7 +133,7 @@ jobs: allow-prereleases: true - name: Setup Test Env run: | - pip install coverage tox + pip install "coverage[toml]" tox - name: Erase coverage run: | coverage erase diff --git a/.github/workflows/test-integrations-web-frameworks-2.yml b/.github/workflows/test-integrations-web-frameworks-2.yml index e95e267eda..37d00f8fbf 100644 --- a/.github/workflows/test-integrations-web-frameworks-2.yml +++ b/.github/workflows/test-integrations-web-frameworks-2.yml @@ -39,7 +39,7 @@ jobs: allow-prereleases: true - name: Setup Test Env run: | - pip install coverage tox + pip install "coverage[toml]" tox - name: Erase coverage run: | coverage erase @@ -117,7 +117,7 @@ jobs: allow-prereleases: true - name: Setup Test Env run: | - pip install coverage tox + pip install "coverage[toml]" tox - name: Erase coverage run: | coverage erase diff --git a/CHANGELOG.md b/CHANGELOG.md index 8d6050b50e..1f811b6d8c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,51 @@ # Changelog +## 2.11.0 + +### Various fixes & improvements + +- Add `disabled_integrations` (#3328) by @sentrivana + + Disabling individual integrations is now much easier. + Instead of disabling all automatically enabled integrations and specifying the ones + you want to keep, you can now use the new + [`disabled_integrations`](https://docs.sentry.io/platforms/python/configuration/options/#auto-enabling-integrations) + config option to provide a list of integrations to disable: + + ```python + import sentry_sdk + from sentry_sdk.integrations.flask import FlaskIntegration + + sentry_sdk.init( + # Do not use the Flask integration even if Flask is installed. 
+ disabled_integrations=[ + FlaskIntegration(), + ], + ) + ``` + +- Use operation name as transaction name in Strawberry (#3294) by @sentrivana +- WSGI integrations respect `SCRIPT_NAME` env variable (#2622) by @sarvaSanjay +- Make Django DB spans have origin `auto.db.django` (#3319) by @antonpirker +- Sort breadcrumbs by time before sending (#3307) by @antonpirker +- Fix `KeyError('sentry-monitor-start-timestamp-s')` (#3278) by @Mohsen-Khodabakhshi +- Set MongoDB tags directly on span data (#3290) by @0Calories +- Lower logger level for some messages (#3305) by @sentrivana and @antonpirker +- Emit deprecation warnings from `Hub` API (#3280) by @szokeasaurusrex +- Clarify that `instrumenter` is internal-only (#3299) by @szokeasaurusrex +- Support Django 5.1 (#3207) by @sentrivana +- Remove apparently unnecessary `if` (#3298) by @szokeasaurusrex +- Preliminary support for Python 3.13 (#3200) by @sentrivana +- Move `sentry_sdk.init` out of `hub.py` (#3276) by @szokeasaurusrex +- Unhardcode integration list (#3240) by @rominf +- Allow passing of PostgreSQL port in tests (#3281) by @rominf +- Add tests for `@ai_track` decorator (#3325) by @colin-sentry +- Do not include type checking code in coverage report (#3327) by @antonpirker +- Fix test_installed_modules (#3309) by @szokeasaurusrex +- Fix typos and grammar in a comment (#3293) by @szokeasaurusrex +- Fixed failed tests setup (#3303) by @antonpirker +- Only assert warnings we are interested in (#3314) by @szokeasaurusrex + ## 2.10.0 ### Various fixes & improvements @@ -22,7 +68,7 @@ LangchainIntegration(tiktoken_encoding_name="cl100k_base"), ], ) - ``` + ``` - PyMongo: Send query description as valid JSON (#3291) by @0Calories - Remove Python 2 compatibility code (#3284) by @szokeasaurusrex @@ -137,7 +183,7 @@ This change fixes a regression in our cron monitoring feature, which caused cron ```python from sentry_sdk.integrations.starlette import StarletteIntegration from sentry_sdk.integrations.fastapi import FastApiIntegration - + sentry_sdk.init( # ... integrations=[ @@ -266,9 +312,9 @@ This change fixes a regression in our cron monitoring feature, which caused cron integrations=[AnthropicIntegration()], ) - client = Anthropic() + client = Anthropic() ``` - Check out [the Anthropic docs](https://docs.sentry.io/platforms/python/integrations/anthropic/) for details. + Check out [the Anthropic docs](https://docs.sentry.io/platforms/python/integrations/anthropic/) for details. - **New integration:** [Huggingface Hub](https://docs.sentry.io/platforms/python/integrations/huggingface/) (#3033) by @colin-sentry @@ -323,13 +369,13 @@ This change fixes a regression in our cron monitoring feature, which caused cron ## 2.0.0 -This is the first major update in a *long* time! +This is the first major update in a *long* time! We dropped support for some ancient languages and frameworks (Yes, Python 2.7 is no longer supported). Additionally we refactored a big part of the foundation of the SDK (how data inside the SDK is handled). We hope you like it! 
-For a shorter version of what you need to do, to upgrade to Sentry SDK 2.0 see: https://docs.sentry.io/platforms/python/migration/1.x-to-2.x +For a shorter version of what you need to do, to upgrade to Sentry SDK 2.0 see: https://docs.sentry.io/platforms/python/migration/1.x-to-2.x ### New Features @@ -369,7 +415,7 @@ For a shorter version of what you need to do, to upgrade to Sentry SDK 2.0 see: # later in the code execution: - scope = sentry_sdk.Scope.get_current_scope() + scope = sentry_sdk.get_current_scope() scope.set_transaction_name("new-transaction-name") ``` - The classes listed in the table below are now abstract base classes. Therefore, they can no longer be instantiated. Subclasses can only be instantiated if they implement all of the abstract methods. @@ -446,7 +492,7 @@ For a shorter version of what you need to do, to upgrade to Sentry SDK 2.0 see: # do something with the forked scope ``` -- `configure_scope` is deprecated. Use the new isolation scope directly via `Scope.get_isolation_scope()` instead. +- `configure_scope` is deprecated. Use the new isolation scope directly via `get_isolation_scope()` instead. Before: @@ -458,9 +504,9 @@ For a shorter version of what you need to do, to upgrade to Sentry SDK 2.0 see: After: ```python - from sentry_sdk.scope import Scope + from sentry_sdk import get_isolation_scope - scope = Scope.get_isolation_scope() + scope = get_isolation_scope() # do something with `scope` ``` @@ -517,7 +563,7 @@ This is the final 1.x release for the forseeable future. Development will contin "failure_issue_threshold": 5, "recovery_threshold": 5, } - + @monitor(monitor_slug='', monitor_config=monitor_config) def tell_the_world(): print('My scheduled task...') @@ -532,14 +578,14 @@ This is the final 1.x release for the forseeable future. Development will contin ```python import django.db.models.signals import sentry_sdk - + sentry_sdk.init( ... integrations=[ DjangoIntegration( ... signals_denylist=[ - django.db.models.signals.pre_init, + django.db.models.signals.pre_init, django.db.models.signals.post_init, ], ), @@ -562,7 +608,7 @@ This is the final 1.x release for the forseeable future. Development will contin tags["extra"] = "foo" del tags["release"] return True - + sentry_sdk.init( ... _experiments={ diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index 17a9186ff6..53396a37ba 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -42,7 +42,7 @@ Looking to upgrade from Sentry SDK 1.x to 2.x? Here's a comprehensive list of wh # later in the code execution: - scope = sentry_sdk.Scope.get_current_scope() + scope = sentry_sdk.get_current_scope() scope.set_transaction_name("new-transaction-name") ``` @@ -132,18 +132,18 @@ Looking to upgrade from Sentry SDK 1.x to 2.x? 
Here's a comprehensive list of wh After: ```python - from sentry_sdk.scope import Scope + from sentry_sdk import get_current_scope - scope = Scope.get_current_scope() + scope = get_current_scope() # do something with `scope` ``` Or: ```python - from sentry_sdk.scope import Scope + from sentry_sdk import get_isolation_scope - scope = Scope.get_isolation_scope() + scope = get_isolation_scope() # do something with `scope` ``` diff --git a/checkouts/data-schemas b/checkouts/data-schemas index 88273a9f80..6d2c435b8c 160000 --- a/checkouts/data-schemas +++ b/checkouts/data-schemas @@ -1 +1 @@ -Subproject commit 88273a9f80f9de4223471ed5d84447d0e5d03fd5 +Subproject commit 6d2c435b8ce3a67e2065f38374bb437f274d0a6c diff --git a/constraints.txt b/constraints.txt new file mode 100644 index 0000000000..697aca1388 --- /dev/null +++ b/constraints.txt @@ -0,0 +1,3 @@ +# Workaround for https://github.com/pypa/setuptools/issues/4519. +# Applies only for Django tests. +setuptools<72.0.0 diff --git a/docs/conf.py b/docs/conf.py index ed2fe5b452..fc485b9d9a 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -28,7 +28,7 @@ copyright = "2019-{}, Sentry Team and Contributors".format(datetime.now().year) author = "Sentry Team and Contributors" -release = "2.10.0" +release = "2.11.0" version = ".".join(release.split(".")[:2]) # The short X.Y version. diff --git a/pyproject.toml b/pyproject.toml index 20ee9680f7..273289b75f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -8,3 +8,7 @@ extend-exclude = ''' | .*_pb2_grpc.py # exclude autogenerated Protocol Buffer files anywhere in the project ) ''' +[tool.coverage.report] + exclude_also = [ + "if TYPE_CHECKING:", + ] diff --git a/scripts/split-tox-gh-actions/templates/test_group.jinja b/scripts/split-tox-gh-actions/templates/test_group.jinja index 39cb9bfe86..1ac281f8c6 100644 --- a/scripts/split-tox-gh-actions/templates/test_group.jinja +++ b/scripts/split-tox-gh-actions/templates/test_group.jinja @@ -61,8 +61,7 @@ - name: Setup Test Env run: | - pip install coverage tox - + pip install "coverage[toml]" tox - name: Erase coverage run: | coverage erase @@ -96,4 +95,4 @@ uses: codecov/test-results-action@v1 with: token: {% raw %}${{ secrets.CODECOV_TOKEN }}{% endraw %} - files: .junitxml \ No newline at end of file + files: .junitxml diff --git a/sentry_sdk/__init__.py b/sentry_sdk/__init__.py index f74c20a194..1c9cedec5f 100644 --- a/sentry_sdk/__init__.py +++ b/sentry_sdk/__init__.py @@ -1,26 +1,20 @@ -from sentry_sdk.hub import Hub from sentry_sdk.scope import Scope from sentry_sdk.transport import Transport, HttpTransport from sentry_sdk.client import Client -from sentry_sdk._init_implementation import init from sentry_sdk.api import * # noqa from sentry_sdk.consts import VERSION # noqa -from sentry_sdk.crons import monitor # noqa -from sentry_sdk.tracing import trace # noqa - __all__ = [ # noqa "Hub", "Scope", "Client", "Transport", "HttpTransport", - "init", "integrations", - "trace", # From sentry_sdk.api + "init", "add_breadcrumb", "capture_event", "capture_exception", @@ -30,6 +24,9 @@ "flush", "get_baggage", "get_client", + "get_global_scope", + "get_isolation_scope", + "get_current_scope", "get_current_span", "get_traceparent", "is_initialized", @@ -46,6 +43,8 @@ "set_user", "start_span", "start_transaction", + "trace", + "monitor", ] # Initialize the debug support after everything is loaded @@ -53,3 +52,6 @@ init_debug_support() del init_debug_support + +# circular imports +from sentry_sdk.hub import Hub diff --git a/sentry_sdk/_init_implementation.py 
b/sentry_sdk/_init_implementation.py index 382b82acac..256a69ee83 100644 --- a/sentry_sdk/_init_implementation.py +++ b/sentry_sdk/_init_implementation.py @@ -39,7 +39,7 @@ def _init(*args, **kwargs): This takes the same arguments as the client constructor. """ client = sentry_sdk.Client(*args, **kwargs) - sentry_sdk.Scope.get_global_scope().set_client(client) + sentry_sdk.get_global_scope().set_client(client) _check_python_deprecations() rv = _InitGuard(client) return rv diff --git a/sentry_sdk/ai/monitoring.py b/sentry_sdk/ai/monitoring.py index bd48ffa053..b8f6a8c79a 100644 --- a/sentry_sdk/ai/monitoring.py +++ b/sentry_sdk/ai/monitoring.py @@ -1,3 +1,4 @@ +import inspect from functools import wraps import sentry_sdk.utils @@ -26,8 +27,7 @@ def ai_track(description, **span_kwargs): # type: (str, Any) -> Callable[..., Any] def decorator(f): # type: (Callable[..., Any]) -> Callable[..., Any] - @wraps(f) - def wrapped(*args, **kwargs): + def sync_wrapped(*args, **kwargs): # type: (Any, Any) -> Any curr_pipeline = _ai_pipeline_name.get() op = span_kwargs.get("op", "ai.run" if curr_pipeline else "ai.pipeline") @@ -56,7 +56,39 @@ def wrapped(*args, **kwargs): _ai_pipeline_name.set(None) return res - return wrapped + async def async_wrapped(*args, **kwargs): + # type: (Any, Any) -> Any + curr_pipeline = _ai_pipeline_name.get() + op = span_kwargs.get("op", "ai.run" if curr_pipeline else "ai.pipeline") + + with start_span(description=description, op=op, **span_kwargs) as span: + for k, v in kwargs.pop("sentry_tags", {}).items(): + span.set_tag(k, v) + for k, v in kwargs.pop("sentry_data", {}).items(): + span.set_data(k, v) + if curr_pipeline: + span.set_data("ai.pipeline.name", curr_pipeline) + return await f(*args, **kwargs) + else: + _ai_pipeline_name.set(description) + try: + res = await f(*args, **kwargs) + except Exception as e: + event, hint = sentry_sdk.utils.event_from_exception( + e, + client_options=sentry_sdk.get_client().options, + mechanism={"type": "ai_monitoring", "handled": False}, + ) + sentry_sdk.capture_event(event, hint=hint) + raise e from None + finally: + _ai_pipeline_name.set(None) + return res + + if inspect.iscoroutinefunction(f): + return wraps(f)(async_wrapped) + else: + return wraps(f)(sync_wrapped) return decorator diff --git a/sentry_sdk/api.py b/sentry_sdk/api.py index 7766ab79a4..9c11031fbb 100644 --- a/sentry_sdk/api.py +++ b/sentry_sdk/api.py @@ -1,10 +1,15 @@ import inspect +import warnings from contextlib import contextmanager from sentry_sdk import tracing_utils, Client -from sentry_sdk._types import TYPE_CHECKING +from sentry_sdk._init_implementation import init from sentry_sdk.scope import Scope, _ScopeManager, new_scope, isolation_scope -from sentry_sdk.tracing import NoOpSpan, Transaction +from sentry_sdk.tracing import NoOpSpan, Transaction, trace +from sentry_sdk.crons import monitor + + +from sentry_sdk._types import TYPE_CHECKING if TYPE_CHECKING: from collections.abc import Mapping @@ -45,6 +50,7 @@ def overload(x): # When changing this, update __all__ in __init__.py too __all__ = [ + "init", "add_breadcrumb", "capture_event", "capture_exception", @@ -54,6 +60,9 @@ def overload(x): "flush", "get_baggage", "get_client", + "get_global_scope", + "get_isolation_scope", + "get_current_scope", "get_current_span", "get_traceparent", "is_initialized", @@ -70,6 +79,8 @@ def overload(x): "set_user", "start_span", "start_transaction", + "trace", + "monitor", ] @@ -91,6 +102,12 @@ def clientmethod(f): return f +@scopemethod +def get_client(): + # type: () -> 
BaseClient + return Scope.get_client() + + def is_initialized(): # type: () -> bool """ @@ -102,13 +119,35 @@ def is_initialized(): (meaning it is configured to send data) then Sentry is initialized. """ - return Scope.get_client().is_active() + return get_client().is_active() @scopemethod -def get_client(): - # type: () -> BaseClient - return Scope.get_client() +def get_global_scope(): + # type: () -> Scope + return Scope.get_global_scope() + + +@scopemethod +def get_isolation_scope(): + # type: () -> Scope + return Scope.get_isolation_scope() + + +@scopemethod +def get_current_scope(): + # type: () -> Scope + return Scope.get_current_scope() + + +@scopemethod +def last_event_id(): + # type: () -> Optional[str] + """ + See :py:meth:`sentry_sdk.Scope.last_event_id` documentation regarding + this method's limitations. + """ + return Scope.last_event_id() @scopemethod @@ -119,9 +158,7 @@ def capture_event( **scope_kwargs, # type: Any ): # type: (...) -> Optional[str] - return Scope.get_current_scope().capture_event( - event, hint, scope=scope, **scope_kwargs - ) + return get_current_scope().capture_event(event, hint, scope=scope, **scope_kwargs) @scopemethod @@ -132,7 +169,7 @@ def capture_message( **scope_kwargs, # type: Any ): # type: (...) -> Optional[str] - return Scope.get_current_scope().capture_message( + return get_current_scope().capture_message( message, level, scope=scope, **scope_kwargs ) @@ -144,9 +181,7 @@ def capture_exception( **scope_kwargs, # type: Any ): # type: (...) -> Optional[str] - return Scope.get_current_scope().capture_exception( - error, scope=scope, **scope_kwargs - ) + return get_current_scope().capture_exception(error, scope=scope, **scope_kwargs) @scopemethod @@ -156,7 +191,7 @@ def add_breadcrumb( **kwargs, # type: Any ): # type: (...) -> None - return Scope.get_isolation_scope().add_breadcrumb(crumb, hint, **kwargs) + return get_isolation_scope().add_breadcrumb(crumb, hint, **kwargs) @overload @@ -184,7 +219,15 @@ def configure_scope( # noqa: F811 :returns: If no callback is provided, returns a context manager that returns the scope. """ - scope = Scope.get_isolation_scope() + warnings.warn( + "sentry_sdk.configure_scope is deprecated and will be removed in the next major version. " + "Please consult our migration guide to learn how to migrate to the new API: " + "https://docs.sentry.io/platforms/python/migration/1.x-to-2.x#scope-configuring", + DeprecationWarning, + stacklevel=2, + ) + + scope = get_isolation_scope() scope.generate_propagation_context() if callback is not None: @@ -228,9 +271,19 @@ def push_scope( # noqa: F811 :returns: If no `callback` is provided, a context manager that should be used to pop the scope again. """ + warnings.warn( + "sentry_sdk.push_scope is deprecated and will be removed in the next major version. 
" + "Please consult our migration guide to learn how to migrate to the new API: " + "https://docs.sentry.io/platforms/python/migration/1.x-to-2.x#scope-pushing", + DeprecationWarning, + stacklevel=2, + ) + if callback is not None: - with push_scope() as scope: - callback(scope) + with warnings.catch_warnings(): + warnings.simplefilter("ignore", DeprecationWarning) + with push_scope() as scope: + callback(scope) return None return _ScopeManager() @@ -239,37 +292,37 @@ def push_scope( # noqa: F811 @scopemethod def set_tag(key, value): # type: (str, Any) -> None - return Scope.get_isolation_scope().set_tag(key, value) + return get_isolation_scope().set_tag(key, value) @scopemethod def set_tags(tags): # type: (Mapping[str, object]) -> None - Scope.get_isolation_scope().set_tags(tags) + return get_isolation_scope().set_tags(tags) @scopemethod def set_context(key, value): # type: (str, Dict[str, Any]) -> None - return Scope.get_isolation_scope().set_context(key, value) + return get_isolation_scope().set_context(key, value) @scopemethod def set_extra(key, value): # type: (str, Any) -> None - return Scope.get_isolation_scope().set_extra(key, value) + return get_isolation_scope().set_extra(key, value) @scopemethod def set_user(value): # type: (Optional[Dict[str, Any]]) -> None - return Scope.get_isolation_scope().set_user(value) + return get_isolation_scope().set_user(value) @scopemethod def set_level(value): # type: (LogLevelStr) -> None - return Scope.get_isolation_scope().set_level(value) + return get_isolation_scope().set_level(value) @clientmethod @@ -278,7 +331,7 @@ def flush( callback=None, # type: Optional[Callable[[int, float], None]] ): # type: (...) -> None - return Scope.get_client().flush(timeout=timeout, callback=callback) + return get_client().flush(timeout=timeout, callback=callback) @scopemethod @@ -286,7 +339,7 @@ def start_span( **kwargs, # type: Any ): # type: (...) -> Span - return Scope.get_current_scope().start_span(**kwargs) + return get_current_scope().start_span(**kwargs) @scopemethod @@ -325,24 +378,14 @@ def start_transaction( constructor. See :py:class:`sentry_sdk.tracing.Transaction` for available arguments. """ - return Scope.get_current_scope().start_transaction( + return get_current_scope().start_transaction( transaction, custom_sampling_context, **kwargs ) -@scopemethod -def last_event_id(): - # type: () -> Optional[str] - """ - See :py:meth:`sentry_sdk.Scope.last_event_id` documentation regarding - this method's limitations. - """ - return Scope.last_event_id() - - def set_measurement(name, value, unit=""): # type: (str, float, MeasurementUnit) -> None - transaction = Scope.get_current_scope().transaction + transaction = get_current_scope().transaction if transaction is not None: transaction.set_measurement(name, value, unit) @@ -360,7 +403,7 @@ def get_traceparent(): """ Returns the traceparent either from the active span or from the scope. """ - return Scope.get_current_scope().get_traceparent() + return get_current_scope().get_traceparent() def get_baggage(): @@ -368,7 +411,7 @@ def get_baggage(): """ Returns Baggage either from the active span or from the scope. """ - baggage = Scope.get_current_scope().get_baggage() + baggage = get_current_scope().get_baggage() if baggage is not None: return baggage.serialize() @@ -382,6 +425,6 @@ def continue_trace( """ Sets the propagation context from environment or headers and returns a transaction. 
""" - return Scope.get_isolation_scope().continue_trace( + return get_isolation_scope().continue_trace( environ_or_headers, op, name, source, origin ) diff --git a/sentry_sdk/attachments.py b/sentry_sdk/attachments.py index 6bb8a61514..649c4f175b 100644 --- a/sentry_sdk/attachments.py +++ b/sentry_sdk/attachments.py @@ -9,6 +9,25 @@ class Attachment: + """Additional files/data to send along with an event. + + This class stores attachments that can be sent along with an event. Attachments are files or other data, e.g. + config or log files, that are relevant to an event. Attachments are set on the ``Scope``, and are sent along with + all non-transaction events (or all events including transactions if ``add_to_transactions`` is ``True``) that are + captured within the ``Scope``. + + To add an attachment to a ``Scope``, use :py:meth:`sentry_sdk.Scope.add_attachment`. The parameters for + ``add_attachment`` are the same as the parameters for this class's constructor. + + :param bytes: Raw bytes of the attachment, or a function that returns the raw bytes. Must be provided unless + ``path`` is provided. + :param filename: The filename of the attachment. Must be provided unless ``path`` is provided. + :param path: Path to a file to attach. Must be provided unless ``bytes`` is provided. + :param content_type: The content type of the attachment. If not provided, it will be guessed from the ``filename`` + parameter, if available, or the ``path`` parameter if ``filename`` is ``None``. + :param add_to_transactions: Whether to add this attachment to transactions. Defaults to ``False``. + """ + def __init__( self, bytes=None, # type: Union[None, bytes, Callable[[], bytes]] diff --git a/sentry_sdk/client.py b/sentry_sdk/client.py index cfb2352305..edc7b6f7a1 100644 --- a/sentry_sdk/client.py +++ b/sentry_sdk/client.py @@ -267,7 +267,6 @@ def _setup_instrumentation(self, functions_to_trace): function_obj = getattr(module_obj, function_name) setattr(module_obj, function_name, trace(function_obj)) logger.debug("Enabled tracing for %s", function_qualname) - except module_not_found_error: try: # Try to import a class @@ -367,6 +366,7 @@ def _capture_envelope(envelope): with_auto_enabling_integrations=self.options[ "auto_enabling_integrations" ], + disabled_integrations=self.options["disabled_integrations"], ) self.spotlight = None diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index 03d8ce730d..cbeb7654b5 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -32,8 +32,6 @@ class EndpointType(Enum): from typing import Tuple from typing_extensions import TypedDict - from sentry_sdk.integrations import Integration - from sentry_sdk._types import ( BreadcrumbProcessor, ContinuousProfilerMode, @@ -482,7 +480,7 @@ def __init__( environment=None, # type: Optional[str] server_name=None, # type: Optional[str] shutdown_timeout=2, # type: float - integrations=[], # type: Sequence[Integration] # noqa: B006 + integrations=[], # type: Sequence[sentry_sdk.integrations.Integration] # noqa: B006 in_app_include=[], # type: List[str] # noqa: B006 in_app_exclude=[], # type: List[str] # noqa: B006 default_integrations=True, # type: bool @@ -509,6 +507,7 @@ def __init__( profiles_sampler=None, # type: Optional[TracesSampler] profiler_mode=None, # type: Optional[ProfilerMode] auto_enabling_integrations=True, # type: bool + disabled_integrations=None, # type: Optional[Sequence[sentry_sdk.integrations.Integration]] auto_session_tracking=True, # type: bool send_client_reports=True, # type: bool _experiments={}, # 
type: Experiments # noqa: B006 @@ -556,4 +555,4 @@ def _get_default_options(): del _get_default_options -VERSION = "2.10.0" +VERSION = "2.11.0" diff --git a/sentry_sdk/debug.py b/sentry_sdk/debug.py index e30b471698..e4c686a3e8 100644 --- a/sentry_sdk/debug.py +++ b/sentry_sdk/debug.py @@ -2,8 +2,8 @@ import logging import warnings +from sentry_sdk import get_client from sentry_sdk.client import _client_init_debug -from sentry_sdk.scope import Scope from sentry_sdk.utils import logger from logging import LogRecord @@ -14,7 +14,7 @@ def filter(self, record): if _client_init_debug.get(False): return True - return Scope.get_client().options["debug"] + return get_client().options["debug"] def init_debug_support(): diff --git a/sentry_sdk/envelope.py b/sentry_sdk/envelope.py index 44cce52410..6bb1eb22c7 100644 --- a/sentry_sdk/envelope.py +++ b/sentry_sdk/envelope.py @@ -189,9 +189,7 @@ def get_bytes(self): self.bytes = f.read() elif self.json is not None: self.bytes = json_dumps(self.json) - else: - self.bytes = b"" - return self.bytes + return self.bytes or b"" @property def inferred_content_type(self): diff --git a/sentry_sdk/hub.py b/sentry_sdk/hub.py index 2341214882..1493c53992 100644 --- a/sentry_sdk/hub.py +++ b/sentry_sdk/hub.py @@ -5,8 +5,14 @@ from contextlib import contextmanager +from sentry_sdk import ( + get_client, + get_global_scope, + get_isolation_scope, + get_current_scope, +) from sentry_sdk._compat import with_metaclass -from sentry_sdk.scope import Scope, _ScopeManager +from sentry_sdk.scope import _ScopeManager from sentry_sdk.client import Client from sentry_sdk.tracing import ( NoOpSpan, @@ -37,6 +43,7 @@ from typing_extensions import Unpack + from sentry_sdk.scope import Scope from sentry_sdk.client import BaseClient from sentry_sdk.integrations import Integration from sentry_sdk._types import ( @@ -142,23 +149,23 @@ def __init__( current_scope = None if isinstance(client_or_hub, Hub): - client = Scope.get_client() + client = get_client() if scope is None: # hub cloning is going on, we use a fork of the current/isolation scope for context manager - scope = Scope.get_isolation_scope().fork() - current_scope = Scope.get_current_scope().fork() + scope = get_isolation_scope().fork() + current_scope = get_current_scope().fork() else: client = client_or_hub # type: ignore - Scope.get_global_scope().set_client(client) + get_global_scope().set_client(client) if scope is None: # so there is no Hub cloning going on # just the current isolation scope is used for context manager - scope = Scope.get_isolation_scope() - current_scope = Scope.get_current_scope() + scope = get_isolation_scope() + current_scope = get_current_scope() if current_scope is None: # just the current current scope is used for context manager - current_scope = Scope.get_current_scope() + current_scope = get_current_scope() self._stack = [(client, scope)] # type: ignore self._last_event_id = None # type: Optional[str] @@ -174,11 +181,11 @@ def __enter__(self): self._old_hubs.append(Hub.current) _local.set(self) - current_scope = Scope.get_current_scope() + current_scope = get_current_scope() self._old_current_scopes.append(current_scope) scope._current_scope.set(self._current_scope) - isolation_scope = Scope.get_isolation_scope() + isolation_scope = get_isolation_scope() self._old_isolation_scopes.append(isolation_scope) scope._isolation_scope.set(self._scope) @@ -230,7 +237,7 @@ def get_integration( If the return value is not `None` the hub is guaranteed to have a client attached. 
""" - return Scope.get_client().get_integration(name_or_class) + return get_client().get_integration(name_or_class) @property def client(self): @@ -242,7 +249,7 @@ def client(self): Returns the current client on the hub. """ - client = Scope.get_client() + client = get_client() if not client.is_active(): return None @@ -257,7 +264,7 @@ def scope(self): This property is deprecated and will be removed in a future release. Returns the current scope on the hub. """ - return Scope.get_isolation_scope() + return get_isolation_scope() def last_event_id(self): # type: () -> Optional[str] @@ -283,7 +290,7 @@ def bind_client( Binds a new client to the hub. """ - Scope.get_global_scope().set_client(new) + get_global_scope().set_client(new) def capture_event(self, event, hint=None, scope=None, **scope_kwargs): # type: (Event, Optional[Hint], Optional[Scope], Any) -> Optional[str] @@ -307,7 +314,7 @@ def capture_event(self, event, hint=None, scope=None, **scope_kwargs): For supported `**scope_kwargs` see :py:meth:`sentry_sdk.Scope.update_from_kwargs`. The `scope` and `scope_kwargs` parameters are mutually exclusive. """ - last_event_id = Scope.get_current_scope().capture_event( + last_event_id = get_current_scope().capture_event( event, hint, scope=scope, **scope_kwargs ) @@ -341,7 +348,7 @@ def capture_message(self, message, level=None, scope=None, **scope_kwargs): :returns: An `event_id` if the SDK decided to send the event (see :py:meth:`sentry_sdk.client._Client.capture_event`). """ - last_event_id = Scope.get_current_scope().capture_message( + last_event_id = get_current_scope().capture_message( message, level=level, scope=scope, **scope_kwargs ) @@ -372,7 +379,7 @@ def capture_exception(self, error=None, scope=None, **scope_kwargs): :returns: An `event_id` if the SDK decided to send the event (see :py:meth:`sentry_sdk.client._Client.capture_event`). """ - last_event_id = Scope.get_current_scope().capture_exception( + last_event_id = get_current_scope().capture_exception( error, scope=scope, **scope_kwargs ) @@ -395,7 +402,7 @@ def add_breadcrumb(self, crumb=None, hint=None, **kwargs): :param hint: An optional value that can be used by `before_breadcrumb` to customize the breadcrumbs that are emitted. """ - Scope.get_isolation_scope().add_breadcrumb(crumb, hint, **kwargs) + get_isolation_scope().add_breadcrumb(crumb, hint, **kwargs) def start_span(self, **kwargs): # type: (Any) -> Span @@ -418,7 +425,7 @@ def start_span(self, **kwargs): For supported `**kwargs` see :py:class:`sentry_sdk.tracing.Span`. """ - scope = Scope.get_current_scope() + scope = get_current_scope() return scope.start_span(**kwargs) def start_transaction( @@ -453,7 +460,7 @@ def start_transaction( For supported `**kwargs` see :py:class:`sentry_sdk.tracing.Transaction`. """ - scope = Scope.get_current_scope() + scope = get_current_scope() # For backwards compatibility, we allow passing the scope as the hub. # We need a major release to make this nice. (if someone searches the code: deprecated) @@ -471,7 +478,7 @@ def continue_trace(self, environ_or_headers, op=None, name=None, source=None): Sets the propagation context from environment or headers and returns a transaction. """ - return Scope.get_isolation_scope().continue_trace( + return get_isolation_scope().continue_trace( environ_or_headers=environ_or_headers, op=op, name=name, source=source ) @@ -558,7 +565,7 @@ def configure_scope( # noqa :returns: If no callback is provided, returns a context manager that returns the scope. 
""" - scope = Scope.get_isolation_scope() + scope = get_isolation_scope() if continue_trace: scope.generate_propagation_context() @@ -587,7 +594,7 @@ def start_session( Starts a new session. """ - Scope.get_isolation_scope().start_session( + get_isolation_scope().start_session( session_mode=session_mode, ) @@ -600,7 +607,7 @@ def end_session(self): Ends the current session if there is one. """ - Scope.get_isolation_scope().end_session() + get_isolation_scope().end_session() def stop_auto_session_tracking(self): # type: (...) -> None @@ -614,7 +621,7 @@ def stop_auto_session_tracking(self): This temporarily session tracking for the current scope when called. To resume session tracking call `resume_auto_session_tracking`. """ - Scope.get_isolation_scope().stop_auto_session_tracking() + get_isolation_scope().stop_auto_session_tracking() def resume_auto_session_tracking(self): # type: (...) -> None @@ -627,7 +634,7 @@ def resume_auto_session_tracking(self): disabled earlier. This requires that generally automatic session tracking is enabled. """ - Scope.get_isolation_scope().resume_auto_session_tracking() + get_isolation_scope().resume_auto_session_tracking() def flush( self, @@ -642,7 +649,7 @@ def flush( Alias for :py:meth:`sentry_sdk.client._Client.flush` """ - return Scope.get_client().flush(timeout=timeout, callback=callback) + return get_client().flush(timeout=timeout, callback=callback) def get_traceparent(self): # type: () -> Optional[str] @@ -653,11 +660,11 @@ def get_traceparent(self): Returns the traceparent either from the active span or from the scope. """ - current_scope = Scope.get_current_scope() + current_scope = get_current_scope() traceparent = current_scope.get_traceparent() if traceparent is None: - isolation_scope = Scope.get_isolation_scope() + isolation_scope = get_isolation_scope() traceparent = isolation_scope.get_traceparent() return traceparent @@ -671,11 +678,11 @@ def get_baggage(self): Returns Baggage either from the active span or from the scope. """ - current_scope = Scope.get_current_scope() + current_scope = get_current_scope() baggage = current_scope.get_baggage() if baggage is None: - isolation_scope = Scope.get_isolation_scope() + isolation_scope = get_isolation_scope() baggage = isolation_scope.get_baggage() if baggage is not None: @@ -694,7 +701,7 @@ def iter_trace_propagation_headers(self, span=None): from the span representing the request, if available, or the current span on the scope if not. """ - return Scope.get_current_scope().iter_trace_propagation_headers( + return get_current_scope().iter_trace_propagation_headers( span=span, ) @@ -713,7 +720,7 @@ def trace_propagation_meta(self, span=None): "The parameter `span` in trace_propagation_meta() is deprecated and will be removed in the future." 
) - return Scope.get_current_scope().trace_propagation_meta( + return get_current_scope().trace_propagation_meta( span=span, ) diff --git a/sentry_sdk/integrations/__init__.py b/sentry_sdk/integrations/__init__.py index 9e3b11f318..3c43ed5472 100644 --- a/sentry_sdk/integrations/__init__.py +++ b/sentry_sdk/integrations/__init__.py @@ -6,10 +6,12 @@ if TYPE_CHECKING: + from collections.abc import Sequence from typing import Callable from typing import Dict from typing import Iterator from typing import List + from typing import Optional from typing import Set from typing import Type @@ -114,14 +116,20 @@ def iter_default_integrations(with_auto_enabling_integrations): def setup_integrations( - integrations, with_defaults=True, with_auto_enabling_integrations=False + integrations, + with_defaults=True, + with_auto_enabling_integrations=False, + disabled_integrations=None, ): - # type: (List[Integration], bool, bool) -> Dict[str, Integration] + # type: (Sequence[Integration], bool, bool, Optional[Sequence[Integration]]) -> Dict[str, Integration] """ Given a list of integration instances, this installs them all. When `with_defaults` is set to `True` all default integrations are added unless they were already provided before. + + `disabled_integrations` takes precedence over `with_defaults` and + `with_auto_enabling_integrations`. """ integrations = dict( (integration.identifier, integration) for integration in integrations or () @@ -129,6 +137,12 @@ def setup_integrations( logger.debug("Setting up integrations (with default = %s)", with_defaults) + # Integrations that will not be enabled + disabled_integrations = [ + integration if isinstance(integration, type) else type(integration) + for integration in disabled_integrations or [] + ] + # Integrations that are not explicitly set up by the user. 
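The `setup_integrations()` hunk above normalizes `disabled_integrations` with `isinstance(integration, type)`, so both integration instances and bare integration classes appear to be accepted. A minimal sketch of the call site (the instance form is what the 2.11.0 changelog documents; treating the bare-class form as supported is only an inference from that normalization, not something the patch states explicitly):

```python
import sentry_sdk
from sentry_sdk.integrations.bottle import BottleIntegration
from sentry_sdk.integrations.flask import FlaskIntegration

sentry_sdk.init(
    disabled_integrations=[
        FlaskIntegration(),  # instance, as shown in the changelog example
        BottleIntegration,   # bare class; kept as-is by the isinstance(..., type) check
    ],
)
```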
used_as_default_integration = set() @@ -144,20 +158,23 @@ def setup_integrations( for identifier, integration in integrations.items(): with _installer_lock: if identifier not in _processed_integrations: - logger.debug( - "Setting up previously not enabled integration %s", identifier - ) - try: - type(integration).setup_once() - except DidNotEnable as e: - if identifier not in used_as_default_integration: - raise - + if type(integration) in disabled_integrations: + logger.debug("Ignoring integration %s", identifier) + else: logger.debug( - "Did not enable default integration %s: %s", identifier, e + "Setting up previously not enabled integration %s", identifier ) - else: - _installed_integrations.add(identifier) + try: + type(integration).setup_once() + except DidNotEnable as e: + if identifier not in used_as_default_integration: + raise + + logger.debug( + "Did not enable default integration %s: %s", identifier, e + ) + else: + _installed_integrations.add(identifier) _processed_integrations.add(identifier) diff --git a/sentry_sdk/integrations/aiohttp.py b/sentry_sdk/integrations/aiohttp.py index 41cf837187..6da340f31c 100644 --- a/sentry_sdk/integrations/aiohttp.py +++ b/sentry_sdk/integrations/aiohttp.py @@ -6,7 +6,6 @@ from sentry_sdk.consts import OP, SPANSTATUS, SPANDATA from sentry_sdk.integrations import Integration, DidNotEnable from sentry_sdk.integrations.logging import ignore_logger -from sentry_sdk.scope import Scope from sentry_sdk.sessions import auto_session_tracking_scope from sentry_sdk.integrations._wsgi_common import ( _filter_headers, @@ -166,7 +165,7 @@ async def sentry_urldispatcher_resolve(self, request): pass if name is not None: - Scope.get_current_scope().set_transaction_name( + sentry_sdk.get_current_scope().set_transaction_name( name, source=SOURCE_FOR_STYLE[integration.transaction_style], ) @@ -219,7 +218,10 @@ async def on_request_start(session, trace_config_ctx, params): client = sentry_sdk.get_client() if should_propagate_trace(client, str(params.url)): - for key, value in Scope.get_current_scope().iter_trace_propagation_headers( + for ( + key, + value, + ) in sentry_sdk.get_current_scope().iter_trace_propagation_headers( span=span ): logger.debug( diff --git a/sentry_sdk/integrations/ariadne.py b/sentry_sdk/integrations/ariadne.py index 86407408a6..c58caec8f0 100644 --- a/sentry_sdk/integrations/ariadne.py +++ b/sentry_sdk/integrations/ariadne.py @@ -1,10 +1,11 @@ from importlib import import_module +import sentry_sdk from sentry_sdk import get_client, capture_event from sentry_sdk.integrations import DidNotEnable, Integration from sentry_sdk.integrations.logging import ignore_logger from sentry_sdk.integrations._wsgi_common import request_body_within_bounds -from sentry_sdk.scope import Scope, should_send_default_pii +from sentry_sdk.scope import should_send_default_pii from sentry_sdk.utils import ( capture_internal_exceptions, ensure_integration_enabled, @@ -57,7 +58,7 @@ def _patch_graphql(): def _sentry_patched_parse_query(context_value, query_parser, data): # type: (Optional[Any], Optional[QueryParser], Any) -> DocumentNode event_processor = _make_request_event_processor(data) - Scope.get_isolation_scope().add_event_processor(event_processor) + sentry_sdk.get_isolation_scope().add_event_processor(event_processor) result = old_parse_query(context_value, query_parser, data) return result @@ -68,7 +69,7 @@ def _sentry_patched_handle_graphql_errors(errors, *args, **kwargs): result = old_handle_errors(errors, *args, **kwargs) event_processor = 
_make_response_event_processor(result[1]) - Scope.get_isolation_scope().add_event_processor(event_processor) + sentry_sdk.get_isolation_scope().add_event_processor(event_processor) client = get_client() if client.is_active(): @@ -92,7 +93,7 @@ def _sentry_patched_handle_query_result(result, *args, **kwargs): query_result = old_handle_query_result(result, *args, **kwargs) event_processor = _make_response_event_processor(query_result[1]) - Scope.get_isolation_scope().add_event_processor(event_processor) + sentry_sdk.get_isolation_scope().add_event_processor(event_processor) client = get_client() if client.is_active(): diff --git a/sentry_sdk/integrations/arq.py b/sentry_sdk/integrations/arq.py index 881722b457..c347ec5138 100644 --- a/sentry_sdk/integrations/arq.py +++ b/sentry_sdk/integrations/arq.py @@ -5,7 +5,7 @@ from sentry_sdk.consts import OP, SPANSTATUS from sentry_sdk.integrations import DidNotEnable, Integration from sentry_sdk.integrations.logging import ignore_logger -from sentry_sdk.scope import Scope, should_send_default_pii +from sentry_sdk.scope import should_send_default_pii from sentry_sdk.tracing import Transaction, TRANSACTION_SOURCE_TASK from sentry_sdk.utils import ( capture_internal_exceptions, @@ -115,7 +115,7 @@ async def _sentry_run_job(self, job_id, score): def _capture_exception(exc_info): # type: (ExcInfo) -> None - scope = Scope.get_current_scope() + scope = sentry_sdk.get_current_scope() if scope.transaction is not None: if exc_info[0] in ARQ_CONTROL_FLOW_EXCEPTIONS: @@ -126,7 +126,7 @@ def _capture_exception(exc_info): event, hint = event_from_exception( exc_info, - client_options=Scope.get_client().options, + client_options=sentry_sdk.get_client().options, mechanism={"type": ArqIntegration.identifier, "handled": False}, ) sentry_sdk.capture_event(event, hint=hint) @@ -138,7 +138,7 @@ def event_processor(event, hint): # type: (Event, Hint) -> Optional[Event] with capture_internal_exceptions(): - scope = Scope.get_current_scope() + scope = sentry_sdk.get_current_scope() if scope.transaction is not None: scope.transaction.name = ctx["job_name"] event["transaction"] = ctx["job_name"] @@ -172,7 +172,7 @@ async def _sentry_coroutine(ctx, *args, **kwargs): if integration is None: return await coroutine(ctx, *args, **kwargs) - Scope.get_isolation_scope().add_event_processor( + sentry_sdk.get_isolation_scope().add_event_processor( _make_event_processor({**ctx, "job_name": name}, *args, **kwargs) ) diff --git a/sentry_sdk/integrations/atexit.py b/sentry_sdk/integrations/atexit.py index d11e35fafa..9babbf235d 100644 --- a/sentry_sdk/integrations/atexit.py +++ b/sentry_sdk/integrations/atexit.py @@ -3,7 +3,6 @@ import atexit import sentry_sdk -from sentry_sdk import Scope from sentry_sdk.utils import logger from sentry_sdk.integrations import Integration from sentry_sdk.utils import ensure_integration_enabled @@ -52,5 +51,5 @@ def _shutdown(): integration = client.get_integration(AtexitIntegration) logger.debug("atexit: shutting down client") - Scope.get_isolation_scope().end_session() + sentry_sdk.get_isolation_scope().end_session() client.close(callback=integration.callback) diff --git a/sentry_sdk/integrations/aws_lambda.py b/sentry_sdk/integrations/aws_lambda.py index 3c909ad9af..560511b48b 100644 --- a/sentry_sdk/integrations/aws_lambda.py +++ b/sentry_sdk/integrations/aws_lambda.py @@ -6,7 +6,7 @@ import sentry_sdk from sentry_sdk.api import continue_trace from sentry_sdk.consts import OP -from sentry_sdk.scope import Scope, should_send_default_pii +from 
sentry_sdk.scope import should_send_default_pii from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT from sentry_sdk.utils import ( AnnotatedValue, @@ -44,7 +44,7 @@ def sentry_init_error(*args, **kwargs): client = sentry_sdk.get_client() with capture_internal_exceptions(): - Scope.get_isolation_scope().clear_breadcrumbs() + sentry_sdk.get_isolation_scope().clear_breadcrumbs() exc_info = sys.exc_info() if exc_info and all(exc_info): diff --git a/sentry_sdk/integrations/bottle.py b/sentry_sdk/integrations/bottle.py index f6dc454478..c5dca2f822 100644 --- a/sentry_sdk/integrations/bottle.py +++ b/sentry_sdk/integrations/bottle.py @@ -10,7 +10,6 @@ from sentry_sdk.integrations import Integration, DidNotEnable from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware from sentry_sdk.integrations._wsgi_common import RequestExtractor -from sentry_sdk.scope import Scope from sentry_sdk._types import TYPE_CHECKING if TYPE_CHECKING: @@ -86,7 +85,7 @@ def _patched_handle(self, environ): # type: (Bottle, Dict[str, Any]) -> Any integration = sentry_sdk.get_client().get_integration(BottleIntegration) - scope = Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() scope._name = "bottle" scope.add_event_processor( _make_request_event_processor(self, bottle_request, integration) diff --git a/sentry_sdk/integrations/celery/__init__.py b/sentry_sdk/integrations/celery/__init__.py index fa40565a62..e1b54d0a37 100644 --- a/sentry_sdk/integrations/celery/__init__.py +++ b/sentry_sdk/integrations/celery/__init__.py @@ -16,7 +16,6 @@ from sentry_sdk.integrations.logging import ignore_logger from sentry_sdk.tracing import BAGGAGE_HEADER_NAME, TRANSACTION_SOURCE_TASK from sentry_sdk._types import TYPE_CHECKING -from sentry_sdk.scope import Scope from sentry_sdk.tracing_utils import Baggage from sentry_sdk.utils import ( capture_internal_exceptions, @@ -100,7 +99,7 @@ def setup_once(): def _set_status(status): # type: (str) -> None with capture_internal_exceptions(): - scope = Scope.get_current_scope() + scope = sentry_sdk.get_current_scope() if scope.span is not None: scope.span.set_status(status) @@ -170,7 +169,7 @@ def _update_celery_task_headers(original_headers, span, monitor_beat_tasks): # if span is None (when the task was started by Celery Beat) # this will return the trace headers from the scope. 
headers = dict( - Scope.get_isolation_scope().iter_trace_propagation_headers(span=span) + sentry_sdk.get_isolation_scope().iter_trace_propagation_headers(span=span) ) if monitor_beat_tasks: @@ -262,9 +261,7 @@ def apply_async(*args, **kwargs): task = args[0] - task_started_from_beat = ( - sentry_sdk.Scope.get_isolation_scope()._name == "celery-beat" - ) + task_started_from_beat = sentry_sdk.get_isolation_scope()._name == "celery-beat" span_mgr = ( sentry_sdk.start_span( diff --git a/sentry_sdk/integrations/celery/beat.py b/sentry_sdk/integrations/celery/beat.py index 6264d58804..b40c39fa80 100644 --- a/sentry_sdk/integrations/celery/beat.py +++ b/sentry_sdk/integrations/celery/beat.py @@ -6,7 +6,6 @@ _now_seconds_since_epoch, ) from sentry_sdk._types import TYPE_CHECKING -from sentry_sdk.scope import Scope from sentry_sdk.utils import ( logger, match_regex_list, @@ -185,7 +184,7 @@ def sentry_patched_scheduler(*args, **kwargs): return original_function(*args, **kwargs) # Tasks started by Celery Beat start a new Trace - scope = Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() scope.set_new_propagation_context() scope._name = "celery-beat" diff --git a/sentry_sdk/integrations/django/__init__.py b/sentry_sdk/integrations/django/__init__.py index 253fce1745..508df2e431 100644 --- a/sentry_sdk/integrations/django/__init__.py +++ b/sentry_sdk/integrations/django/__init__.py @@ -8,7 +8,7 @@ from sentry_sdk._types import TYPE_CHECKING from sentry_sdk.consts import OP, SPANDATA from sentry_sdk.db.explain_plan.django import attach_explain_plan_to_span -from sentry_sdk.scope import Scope, add_global_event_processor, should_send_default_pii +from sentry_sdk.scope import add_global_event_processor, should_send_default_pii from sentry_sdk.serializer import add_global_repr_processor from sentry_sdk.tracing import SOURCE_FOR_STYLE, TRANSACTION_SOURCE_URL from sentry_sdk.tracing_utils import add_query_source, record_sql_queries @@ -371,7 +371,7 @@ def _patch_django_asgi_handler(): def _set_transaction_name_and_source(scope, transaction_style, request): - # type: (Scope, str, WSGIRequest) -> None + # type: (sentry_sdk.Scope, str, WSGIRequest) -> None try: transaction_name = None if transaction_style == "function_name": @@ -419,7 +419,7 @@ def _before_get_response(request): _patch_drf() - scope = Scope.get_current_scope() + scope = sentry_sdk.get_current_scope() # Rely on WSGI middleware to start a trace _set_transaction_name_and_source(scope, integration.transaction_style, request) @@ -429,7 +429,7 @@ def _before_get_response(request): def _attempt_resolve_again(request, scope, transaction_style): - # type: (WSGIRequest, Scope, str) -> None + # type: (WSGIRequest, sentry_sdk.Scope, str) -> None """ Some django middlewares overwrite request.urlconf so we need to respect that contract, @@ -448,7 +448,7 @@ def _after_get_response(request): if integration.transaction_style != "url": return - scope = Scope.get_current_scope() + scope = sentry_sdk.get_current_scope() _attempt_resolve_again(request, scope, integration.transaction_style) @@ -518,7 +518,7 @@ def _got_request_exception(request=None, **kwargs): integration = client.get_integration(DjangoIntegration) if request is not None and integration.transaction_style == "url": - scope = Scope.get_current_scope() + scope = sentry_sdk.get_current_scope() _attempt_resolve_again(request, scope, integration.transaction_style) event, hint = event_from_exception( diff --git a/sentry_sdk/integrations/django/asgi.py 
b/sentry_sdk/integrations/django/asgi.py index bbc742abe9..11691de5a4 100644 --- a/sentry_sdk/integrations/django/asgi.py +++ b/sentry_sdk/integrations/django/asgi.py @@ -13,7 +13,6 @@ from django.core.handlers.wsgi import WSGIRequest import sentry_sdk -from sentry_sdk import Scope from sentry_sdk._types import TYPE_CHECKING from sentry_sdk.consts import OP @@ -112,7 +111,7 @@ async def sentry_patched_asgi_handler(self, scope, receive, send): def sentry_patched_create_request(self, *args, **kwargs): # type: (Any, *Any, **Any) -> Any request, error_response = old_create_request(self, *args, **kwargs) - scope = Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() scope.add_event_processor(_make_asgi_request_event_processor(request)) return request, error_response @@ -169,7 +168,7 @@ def wrap_async_view(callback): @functools.wraps(callback) async def sentry_wrapped_callback(request, *args, **kwargs): # type: (Any, *Any, **Any) -> Any - sentry_scope = Scope.get_isolation_scope() + sentry_scope = sentry_sdk.get_isolation_scope() if sentry_scope.profile is not None: sentry_scope.profile.update_active_thread_id() diff --git a/sentry_sdk/integrations/django/templates.py b/sentry_sdk/integrations/django/templates.py index fb79fdf75b..e91e1a908c 100644 --- a/sentry_sdk/integrations/django/templates.py +++ b/sentry_sdk/integrations/django/templates.py @@ -5,7 +5,6 @@ from django import VERSION as DJANGO_VERSION import sentry_sdk -from sentry_sdk import Scope from sentry_sdk._types import TYPE_CHECKING from sentry_sdk.consts import OP from sentry_sdk.utils import ensure_integration_enabled @@ -93,7 +92,7 @@ def render(request, template_name, context=None, *args, **kwargs): context = context or {} if "sentry_trace_meta" not in context: context["sentry_trace_meta"] = mark_safe( - Scope.get_current_scope().trace_propagation_meta() + sentry_sdk.get_current_scope().trace_propagation_meta() ) with sentry_sdk.start_span( diff --git a/sentry_sdk/integrations/django/views.py b/sentry_sdk/integrations/django/views.py index 01f871a2f6..1bcee492bf 100644 --- a/sentry_sdk/integrations/django/views.py +++ b/sentry_sdk/integrations/django/views.py @@ -1,7 +1,6 @@ import functools import sentry_sdk -from sentry_sdk import Scope from sentry_sdk.consts import OP from sentry_sdk._types import TYPE_CHECKING @@ -76,7 +75,7 @@ def _wrap_sync_view(callback): @functools.wraps(callback) def sentry_wrapped_callback(request, *args, **kwargs): # type: (Any, *Any, **Any) -> Any - sentry_scope = Scope.get_isolation_scope() + sentry_scope = sentry_sdk.get_isolation_scope() # set the active thread id to the handler thread for sync views # this isn't necessary for async views since that runs on main if sentry_scope.profile is not None: diff --git a/sentry_sdk/integrations/falcon.py b/sentry_sdk/integrations/falcon.py index be3fe27519..0e0bfec9c8 100644 --- a/sentry_sdk/integrations/falcon.py +++ b/sentry_sdk/integrations/falcon.py @@ -2,7 +2,6 @@ from sentry_sdk.integrations import Integration, DidNotEnable from sentry_sdk.integrations._wsgi_common import RequestExtractor from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware -from sentry_sdk.scope import Scope from sentry_sdk.tracing import SOURCE_FOR_STYLE from sentry_sdk.utils import ( capture_internal_exceptions, @@ -106,7 +105,7 @@ def process_request(self, req, resp, *args, **kwargs): if integration is None: return - scope = Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() scope._name = "falcon" 
scope.add_event_processor(_make_request_event_processor(req, integration)) diff --git a/sentry_sdk/integrations/fastapi.py b/sentry_sdk/integrations/fastapi.py index 8fd18fef96..09784560b4 100644 --- a/sentry_sdk/integrations/fastapi.py +++ b/sentry_sdk/integrations/fastapi.py @@ -5,7 +5,7 @@ import sentry_sdk from sentry_sdk._types import TYPE_CHECKING from sentry_sdk.integrations import DidNotEnable -from sentry_sdk.scope import Scope, should_send_default_pii +from sentry_sdk.scope import should_send_default_pii from sentry_sdk.tracing import SOURCE_FOR_STYLE, TRANSACTION_SOURCE_ROUTE from sentry_sdk.utils import ( transaction_from_function, @@ -43,7 +43,7 @@ def setup_once(): def _set_transaction_name_and_source(scope, transaction_style, request): - # type: (Scope, str, Any) -> None + # type: (sentry_sdk.Scope, str, Any) -> None name = "" if transaction_style == "endpoint": @@ -87,7 +87,7 @@ def _sentry_get_request_handler(*args, **kwargs): @wraps(old_call) def _sentry_call(*args, **kwargs): # type: (*Any, **Any) -> Any - sentry_scope = Scope.get_isolation_scope() + sentry_scope = sentry_sdk.get_isolation_scope() if sentry_scope.profile is not None: sentry_scope.profile.update_active_thread_id() return old_call(*args, **kwargs) @@ -105,9 +105,9 @@ async def _sentry_app(*args, **kwargs): request = args[0] _set_transaction_name_and_source( - Scope.get_current_scope(), integration.transaction_style, request + sentry_sdk.get_current_scope(), integration.transaction_style, request ) - sentry_scope = Scope.get_isolation_scope() + sentry_scope = sentry_sdk.get_isolation_scope() extractor = StarletteRequestExtractor(request) info = await extractor.extract_request_info() diff --git a/sentry_sdk/integrations/flask.py b/sentry_sdk/integrations/flask.py index 783576839a..8d82c57695 100644 --- a/sentry_sdk/integrations/flask.py +++ b/sentry_sdk/integrations/flask.py @@ -3,7 +3,7 @@ from sentry_sdk.integrations import DidNotEnable, Integration from sentry_sdk.integrations._wsgi_common import RequestExtractor from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware -from sentry_sdk.scope import Scope, should_send_default_pii +from sentry_sdk.scope import should_send_default_pii from sentry_sdk.tracing import SOURCE_FOR_STYLE from sentry_sdk.utils import ( capture_internal_exceptions, @@ -96,14 +96,14 @@ def _add_sentry_trace(sender, template, context, **extra): if "sentry_trace" in context: return - scope = Scope.get_current_scope() + scope = sentry_sdk.get_current_scope() trace_meta = Markup(scope.trace_propagation_meta()) context["sentry_trace"] = trace_meta # for backwards compatibility context["sentry_trace_meta"] = trace_meta def _set_transaction_name_and_source(scope, transaction_style, request): - # type: (Scope, str, Request) -> None + # type: (sentry_sdk.Scope, str, Request) -> None try: name_for_style = { "url": request.url_rule.rule, @@ -126,10 +126,10 @@ def _request_started(app, **kwargs): # Set the transaction name and source here, # but rely on WSGI middleware to actually start the transaction _set_transaction_name_and_source( - Scope.get_current_scope(), integration.transaction_style, request + sentry_sdk.get_current_scope(), integration.transaction_style, request ) - scope = Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() evt_processor = _make_request_event_processor(app, request, integration) scope.add_event_processor(evt_processor) diff --git a/sentry_sdk/integrations/gql.py b/sentry_sdk/integrations/gql.py index 0552edde60..220095f2ac 100644 --- 
a/sentry_sdk/integrations/gql.py +++ b/sentry_sdk/integrations/gql.py @@ -6,7 +6,7 @@ ) from sentry_sdk.integrations import DidNotEnable, Integration -from sentry_sdk.scope import Scope, should_send_default_pii +from sentry_sdk.scope import should_send_default_pii try: import gql # type: ignore[import-not-found] @@ -94,7 +94,7 @@ def _patch_execute(): @ensure_integration_enabled(GQLIntegration, real_execute) def sentry_patched_execute(self, document, *args, **kwargs): # type: (gql.Client, DocumentNode, Any, Any) -> Any - scope = Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() scope.add_event_processor(_make_gql_event_processor(self, document)) try: diff --git a/sentry_sdk/integrations/graphene.py b/sentry_sdk/integrations/graphene.py index 5b8c393743..aa16dce92b 100644 --- a/sentry_sdk/integrations/graphene.py +++ b/sentry_sdk/integrations/graphene.py @@ -1,6 +1,9 @@ +from contextlib import contextmanager + import sentry_sdk +from sentry_sdk.consts import OP from sentry_sdk.integrations import DidNotEnable, Integration -from sentry_sdk.scope import Scope, should_send_default_pii +from sentry_sdk.scope import should_send_default_pii from sentry_sdk.utils import ( capture_internal_exceptions, ensure_integration_enabled, @@ -17,6 +20,7 @@ if TYPE_CHECKING: + from collections.abc import Generator from typing import Any, Dict, Union from graphene.language.source import Source # type: ignore from graphql.execution import ExecutionResult # type: ignore @@ -49,16 +53,18 @@ def _patch_graphql(): @ensure_integration_enabled(GrapheneIntegration, old_graphql_sync) def _sentry_patched_graphql_sync(schema, source, *args, **kwargs): # type: (GraphQLSchema, Union[str, Source], Any, Any) -> ExecutionResult - scope = Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() scope.add_event_processor(_event_processor) - result = old_graphql_sync(schema, source, *args, **kwargs) + with graphql_span(schema, source, kwargs): + result = old_graphql_sync(schema, source, *args, **kwargs) with capture_internal_exceptions(): + client = sentry_sdk.get_client() for error in result.errors or []: event, hint = event_from_exception( error, - client_options=sentry_sdk.get_client().options, + client_options=client.options, mechanism={ "type": GrapheneIntegration.identifier, "handled": False, @@ -70,19 +76,22 @@ def _sentry_patched_graphql_sync(schema, source, *args, **kwargs): async def _sentry_patched_graphql_async(schema, source, *args, **kwargs): # type: (GraphQLSchema, Union[str, Source], Any, Any) -> ExecutionResult - if sentry_sdk.get_client().get_integration(GrapheneIntegration) is None: + integration = sentry_sdk.get_client().get_integration(GrapheneIntegration) + if integration is None: return await old_graphql_async(schema, source, *args, **kwargs) - scope = Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() scope.add_event_processor(_event_processor) - result = await old_graphql_async(schema, source, *args, **kwargs) + with graphql_span(schema, source, kwargs): + result = await old_graphql_async(schema, source, *args, **kwargs) with capture_internal_exceptions(): + client = sentry_sdk.get_client() for error in result.errors or []: event, hint = event_from_exception( error, - client_options=sentry_sdk.get_client().options, + client_options=client.options, mechanism={ "type": GrapheneIntegration.identifier, "handled": False, @@ -106,3 +115,43 @@ def _event_processor(event, hint): del event["request"]["data"] return event + + +@contextmanager +def 
graphql_span(schema, source, kwargs): + # type: (GraphQLSchema, Union[str, Source], Dict[str, Any]) -> Generator[None, None, None] + operation_name = kwargs.get("operation_name") + + operation_type = "query" + op = OP.GRAPHQL_QUERY + if source.strip().startswith("mutation"): + operation_type = "mutation" + op = OP.GRAPHQL_MUTATION + elif source.strip().startswith("subscription"): + operation_type = "subscription" + op = OP.GRAPHQL_SUBSCRIPTION + + sentry_sdk.add_breadcrumb( + crumb={ + "data": { + "operation_name": operation_name, + "operation_type": operation_type, + }, + "category": "graphql.operation", + }, + ) + + scope = sentry_sdk.get_current_scope() + if scope.span: + _graphql_span = scope.span.start_child(op=op, description=operation_name) + else: + _graphql_span = sentry_sdk.start_span(op=op, description=operation_name) + + _graphql_span.set_data("graphql.document", source) + _graphql_span.set_data("graphql.operation.name", operation_name) + _graphql_span.set_data("graphql.operation.type", operation_type) + + try: + yield + finally: + _graphql_span.finish() diff --git a/sentry_sdk/integrations/grpc/aio/client.py b/sentry_sdk/integrations/grpc/aio/client.py index b67481b5b5..143f0e43a9 100644 --- a/sentry_sdk/integrations/grpc/aio/client.py +++ b/sentry_sdk/integrations/grpc/aio/client.py @@ -12,7 +12,6 @@ import sentry_sdk from sentry_sdk.consts import OP from sentry_sdk.integrations.grpc.consts import SPAN_ORIGIN -from sentry_sdk.scope import Scope class ClientInterceptor: @@ -23,7 +22,10 @@ def _update_client_call_details_metadata_from_scope( metadata = ( list(client_call_details.metadata) if client_call_details.metadata else [] ) - for key, value in Scope.get_current_scope().iter_trace_propagation_headers(): + for ( + key, + value, + ) in sentry_sdk.get_current_scope().iter_trace_propagation_headers(): metadata.append((key, value)) client_call_details = ClientCallDetails( diff --git a/sentry_sdk/integrations/grpc/client.py b/sentry_sdk/integrations/grpc/client.py index c4e89f3737..c12f0ab2c4 100644 --- a/sentry_sdk/integrations/grpc/client.py +++ b/sentry_sdk/integrations/grpc/client.py @@ -3,7 +3,6 @@ from sentry_sdk.consts import OP from sentry_sdk.integrations import DidNotEnable from sentry_sdk.integrations.grpc.consts import SPAN_ORIGIN -from sentry_sdk.scope import Scope if TYPE_CHECKING: from typing import Any, Callable, Iterator, Iterable, Union @@ -74,7 +73,10 @@ def _update_client_call_details_metadata_from_scope(client_call_details): metadata = ( list(client_call_details.metadata) if client_call_details.metadata else [] ) - for key, value in Scope.get_current_scope().iter_trace_propagation_headers(): + for ( + key, + value, + ) in sentry_sdk.get_current_scope().iter_trace_propagation_headers(): metadata.append((key, value)) client_call_details = grpc._interceptor._ClientCallDetails( diff --git a/sentry_sdk/integrations/httpx.py b/sentry_sdk/integrations/httpx.py index e19455118d..d35990cb30 100644 --- a/sentry_sdk/integrations/httpx.py +++ b/sentry_sdk/integrations/httpx.py @@ -1,7 +1,6 @@ import sentry_sdk from sentry_sdk.consts import OP, SPANDATA from sentry_sdk.integrations import Integration, DidNotEnable -from sentry_sdk.scope import Scope from sentry_sdk.tracing import BAGGAGE_HEADER_NAME from sentry_sdk.tracing_utils import should_propagate_trace from sentry_sdk.utils import ( @@ -71,7 +70,7 @@ def send(self, request, **kwargs): for ( key, value, - ) in Scope.get_current_scope().iter_trace_propagation_headers(): + ) in 
sentry_sdk.get_current_scope().iter_trace_propagation_headers(): logger.debug( "[Tracing] Adding `{key}` header {value} to outgoing request to {url}.".format( key=key, value=value, url=request.url @@ -127,7 +126,7 @@ async def send(self, request, **kwargs): for ( key, value, - ) in Scope.get_current_scope().iter_trace_propagation_headers(): + ) in sentry_sdk.get_current_scope().iter_trace_propagation_headers(): logger.debug( "[Tracing] Adding `{key}` header {value} to outgoing request to {url}.".format( key=key, value=value, url=request.url diff --git a/sentry_sdk/integrations/huey.py b/sentry_sdk/integrations/huey.py index 254775386f..21ccf95813 100644 --- a/sentry_sdk/integrations/huey.py +++ b/sentry_sdk/integrations/huey.py @@ -6,7 +6,7 @@ from sentry_sdk.api import continue_trace, get_baggage, get_traceparent from sentry_sdk.consts import OP, SPANSTATUS from sentry_sdk.integrations import DidNotEnable, Integration -from sentry_sdk.scope import Scope, should_send_default_pii +from sentry_sdk.scope import should_send_default_pii from sentry_sdk.tracing import ( BAGGAGE_HEADER_NAME, SENTRY_TRACE_HEADER_NAME, @@ -106,7 +106,7 @@ def event_processor(event, hint): def _capture_exception(exc_info): # type: (ExcInfo) -> None - scope = Scope.get_current_scope() + scope = sentry_sdk.get_current_scope() if exc_info[0] in HUEY_CONTROL_FLOW_EXCEPTIONS: scope.transaction.set_status(SPANSTATUS.ABORTED) @@ -115,7 +115,7 @@ def _capture_exception(exc_info): scope.transaction.set_status(SPANSTATUS.INTERNAL_ERROR) event, hint = event_from_exception( exc_info, - client_options=Scope.get_client().options, + client_options=sentry_sdk.get_client().options, mechanism={"type": HueyIntegration.identifier, "handled": False}, ) scope.capture_event(event, hint=hint) diff --git a/sentry_sdk/integrations/opentelemetry/distro.py b/sentry_sdk/integrations/opentelemetry/distro.py deleted file mode 100644 index 87a49a09c3..0000000000 --- a/sentry_sdk/integrations/opentelemetry/distro.py +++ /dev/null @@ -1,66 +0,0 @@ -""" -IMPORTANT: The contents of this file are part of a proof of concept and as such -are experimental and not suitable for production use. They may be changed or -removed at any time without prior notice. 
-""" - -from sentry_sdk.integrations import DidNotEnable -from sentry_sdk.integrations.opentelemetry.propagator import SentryPropagator -from sentry_sdk.integrations.opentelemetry.span_processor import SentrySpanProcessor -from sentry_sdk.utils import logger -from sentry_sdk._types import TYPE_CHECKING - -try: - from opentelemetry import trace - from opentelemetry.instrumentation.distro import BaseDistro # type: ignore[attr-defined] - from opentelemetry.propagate import set_global_textmap - from opentelemetry.sdk.trace import TracerProvider -except ImportError: - raise DidNotEnable("opentelemetry not installed") - -try: - from opentelemetry.instrumentation.django import DjangoInstrumentor # type: ignore -except ImportError: - DjangoInstrumentor = None - -try: - from opentelemetry.instrumentation.flask import FlaskInstrumentor # type: ignore -except ImportError: - FlaskInstrumentor = None - -if TYPE_CHECKING: - # XXX pkg_resources is deprecated, there's a PR to switch to importlib: - # https://github.com/open-telemetry/opentelemetry-python-contrib/pull/2181 - # we should align this when the PR gets merged - from pkg_resources import EntryPoint - from typing import Any - - -CONFIGURABLE_INSTRUMENTATIONS = { - DjangoInstrumentor: {"is_sql_commentor_enabled": True}, - FlaskInstrumentor: {"enable_commenter": True}, -} - - -class _SentryDistro(BaseDistro): # type: ignore[misc] - def _configure(self, **kwargs): - # type: (Any) -> None - provider = TracerProvider() - provider.add_span_processor(SentrySpanProcessor()) - trace.set_tracer_provider(provider) - set_global_textmap(SentryPropagator()) - - def load_instrumentor(self, entry_point, **kwargs): - # type: (EntryPoint, Any) -> None - instrumentor = entry_point.load() - - if instrumentor in CONFIGURABLE_INSTRUMENTATIONS: - for key, value in CONFIGURABLE_INSTRUMENTATIONS[instrumentor].items(): - kwargs[key] = value - - instrumentor().instrument(**kwargs) - logger.debug( - "[OTel] %s instrumented (%s)", - entry_point.name, - ", ".join([f"{k}: {v}" for k, v in kwargs.items()]), - ) diff --git a/sentry_sdk/integrations/opentelemetry/integration.py b/sentry_sdk/integrations/opentelemetry/integration.py index b765703f54..43e0396c16 100644 --- a/sentry_sdk/integrations/opentelemetry/integration.py +++ b/sentry_sdk/integrations/opentelemetry/integration.py @@ -4,32 +4,26 @@ removed at any time without prior notice. """ -import sys -from importlib import import_module - from sentry_sdk.integrations import DidNotEnable, Integration -from sentry_sdk.integrations.opentelemetry.distro import _SentryDistro -from sentry_sdk.utils import logger, _get_installed_modules -from sentry_sdk._types import TYPE_CHECKING +from sentry_sdk.integrations.opentelemetry.propagator import SentryPropagator +from sentry_sdk.integrations.opentelemetry.span_processor import SentrySpanProcessor +from sentry_sdk.utils import logger try: - from opentelemetry.instrumentation.auto_instrumentation._load import ( - _load_instrumentors, - ) + from opentelemetry import trace + from opentelemetry.propagate import set_global_textmap + from opentelemetry.sdk.trace import TracerProvider except ImportError: raise DidNotEnable("opentelemetry not installed") -if TYPE_CHECKING: - from typing import Dict +try: + from opentelemetry.instrumentation.django import DjangoInstrumentor # type: ignore[import-not-found] +except ImportError: + DjangoInstrumentor = None -CLASSES_TO_INSTRUMENT = { - # A mapping of packages to their entry point class that will be instrumented. 
- # This is used to post-instrument any classes that were imported before OTel - # instrumentation took place. - "fastapi": "fastapi.FastAPI", - "flask": "flask.Flask", - # XXX Add a mapping for all instrumentors that patch by replacing a class +CONFIGURABLE_INSTRUMENTATIONS = { + DjangoInstrumentor: {"is_sql_commentor_enabled": True}, } @@ -44,123 +38,21 @@ def setup_once(): "Use at your own risk." ) - original_classes = _record_unpatched_classes() - - try: - distro = _SentryDistro() - distro.configure() - # XXX This does some initial checks before loading instrumentations - # (checks OTEL_PYTHON_DISABLED_INSTRUMENTATIONS, checks version - # compat). If we don't want this in the future, we can implement our - # own _load_instrumentors (it anyway just iterates over - # opentelemetry_instrumentor entry points). - _load_instrumentors(distro) - except Exception: - logger.exception("[OTel] Failed to auto-initialize OpenTelemetry") - - # XXX: Consider whether this is ok to keep and make default. - # The alternative is asking folks to follow specific import order for - # some integrations (sentry_sdk.init before you even import Flask, for - # instance). - try: - _patch_remaining_classes(original_classes) - except Exception: - logger.exception( - "[OTel] Failed to post-patch instrumented classes. " - "You might have to make sure sentry_sdk.init() is called before importing anything else." - ) + _setup_sentry_tracing() + # _setup_instrumentors() logger.debug("[OTel] Finished setting up OpenTelemetry integration") -def _record_unpatched_classes(): - # type: () -> Dict[str, type] - """ - Keep references to classes that are about to be instrumented. - - Used to search for unpatched classes after the instrumentation has run so - that they can be patched manually. - """ - installed_packages = _get_installed_modules() - - original_classes = {} - - for package, orig_path in CLASSES_TO_INSTRUMENT.items(): - if package in installed_packages: - try: - original_cls = _import_by_path(orig_path) - except (AttributeError, ImportError): - logger.debug("[OTel] Failed to import %s", orig_path) - continue - - original_classes[package] = original_cls - - return original_classes - - -def _patch_remaining_classes(original_classes): - # type: (Dict[str, type]) -> None - """ - Best-effort attempt to patch any uninstrumented classes in sys.modules. - - This enables us to not care about the order of imports and sentry_sdk.init() - in user code. If e.g. the Flask class had been imported before sentry_sdk - was init()ed (and therefore before the OTel instrumentation ran), it would - not be instrumented. This function goes over remaining uninstrumented - occurrences of the class in sys.modules and replaces them with the - instrumented class. - - Since this is looking for exact matches, it will not work in some scenarios - (e.g. if someone is not using the specific class explicitly, but rather - inheriting from it). In those cases it's still necessary to sentry_sdk.init() - before importing anything that's supposed to be instrumented. 
- """ - # check which classes have actually been instrumented - instrumented_classes = {} - - for package in list(original_classes.keys()): - original_path = CLASSES_TO_INSTRUMENT[package] - - try: - cls = _import_by_path(original_path) - except (AttributeError, ImportError): - logger.debug( - "[OTel] Failed to check if class has been instrumented: %s", - original_path, - ) - del original_classes[package] - continue - - if not cls.__module__.startswith("opentelemetry."): - del original_classes[package] - continue - - instrumented_classes[package] = cls - - if not instrumented_classes: - return - - # replace occurrences of the original unpatched class in sys.modules - for module_name, module in sys.modules.copy().items(): - if ( - module_name.startswith("sentry_sdk") - or module_name in sys.builtin_module_names - ): - continue - - for package, original_cls in original_classes.items(): - for var_name, var in vars(module).copy().items(): - if var == original_cls: - logger.debug( - "[OTel] Additionally patching %s from %s", - original_cls, - module_name, - ) - - setattr(module, var_name, instrumented_classes[package]) +def _setup_sentry_tracing(): + # type: () -> None + provider = TracerProvider() + provider.add_span_processor(SentrySpanProcessor()) + trace.set_tracer_provider(provider) + set_global_textmap(SentryPropagator()) -def _import_by_path(path): - # type: (str) -> type - parts = path.rsplit(".", maxsplit=1) - return getattr(import_module(parts[0]), parts[-1]) +def _setup_instrumentors(): + # type: () -> None + for instrumentor, kwargs in CONFIGURABLE_INSTRUMENTATIONS.items(): + instrumentor().instrument(**kwargs) diff --git a/sentry_sdk/integrations/pyramid.py b/sentry_sdk/integrations/pyramid.py index b7404c8bec..887837c0d6 100644 --- a/sentry_sdk/integrations/pyramid.py +++ b/sentry_sdk/integrations/pyramid.py @@ -6,7 +6,7 @@ from sentry_sdk.integrations import Integration, DidNotEnable from sentry_sdk.integrations._wsgi_common import RequestExtractor from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware -from sentry_sdk.scope import Scope, should_send_default_pii +from sentry_sdk.scope import should_send_default_pii from sentry_sdk.tracing import SOURCE_FOR_STYLE from sentry_sdk.utils import ( capture_internal_exceptions, @@ -79,9 +79,9 @@ def sentry_patched_call_view(registry, request, *args, **kwargs): integration = sentry_sdk.get_client().get_integration(PyramidIntegration) _set_transaction_name_and_source( - Scope.get_current_scope(), integration.transaction_style, request + sentry_sdk.get_current_scope(), integration.transaction_style, request ) - scope = Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() scope.add_event_processor( _make_event_processor(weakref.ref(request), integration) ) @@ -149,7 +149,7 @@ def _capture_exception(exc_info): def _set_transaction_name_and_source(scope, transaction_style, request): - # type: (Scope, str, Request) -> None + # type: (sentry_sdk.Scope, str, Request) -> None try: name_for_style = { "route_name": request.matched_route.name, diff --git a/sentry_sdk/integrations/quart.py b/sentry_sdk/integrations/quart.py index 662074cf9b..0689406672 100644 --- a/sentry_sdk/integrations/quart.py +++ b/sentry_sdk/integrations/quart.py @@ -7,7 +7,7 @@ from sentry_sdk.integrations import DidNotEnable, Integration from sentry_sdk.integrations._wsgi_common import _filter_headers from sentry_sdk.integrations.asgi import SentryAsgiMiddleware -from sentry_sdk.scope import Scope, should_send_default_pii +from sentry_sdk.scope import 
should_send_default_pii from sentry_sdk.tracing import SOURCE_FOR_STYLE from sentry_sdk.utils import ( capture_internal_exceptions, @@ -122,7 +122,7 @@ def decorator(old_func): @ensure_integration_enabled(QuartIntegration, old_func) def _sentry_func(*args, **kwargs): # type: (*Any, **Any) -> Any - scope = Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() if scope.profile is not None: scope.profile.active_thread_id = ( threading.current_thread().ident @@ -140,7 +140,7 @@ def _sentry_func(*args, **kwargs): def _set_transaction_name_and_source(scope, transaction_style, request): - # type: (Scope, str, Request) -> None + # type: (sentry_sdk.Scope, str, Request) -> None try: name_for_style = { @@ -169,10 +169,10 @@ async def _request_websocket_started(app, **kwargs): # Set the transaction name here, but rely on ASGI middleware # to actually start the transaction _set_transaction_name_and_source( - Scope.get_current_scope(), integration.transaction_style, request_websocket + sentry_sdk.get_current_scope(), integration.transaction_style, request_websocket ) - scope = Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() evt_processor = _make_request_event_processor(app, request_websocket, integration) scope.add_event_processor(evt_processor) diff --git a/sentry_sdk/integrations/rq.py b/sentry_sdk/integrations/rq.py index fc5c3faf76..6afb07c92d 100644 --- a/sentry_sdk/integrations/rq.py +++ b/sentry_sdk/integrations/rq.py @@ -6,7 +6,6 @@ from sentry_sdk.integrations import DidNotEnable, Integration from sentry_sdk.integrations.logging import ignore_logger from sentry_sdk.tracing import TRANSACTION_SOURCE_TASK -from sentry_sdk.scope import Scope from sentry_sdk.utils import ( capture_internal_exceptions, ensure_integration_enabled, @@ -105,7 +104,7 @@ def sentry_patched_handle_exception(self, job, *exc_info, **kwargs): @ensure_integration_enabled(RqIntegration, old_enqueue_job) def sentry_patched_enqueue_job(self, job, **kwargs): # type: (Queue, Any, **Any) -> Any - scope = Scope.get_current_scope() + scope = sentry_sdk.get_current_scope() if scope.span is not None: job.meta["_sentry_trace_headers"] = dict( scope.iter_trace_propagation_headers() diff --git a/sentry_sdk/integrations/sanic.py b/sentry_sdk/integrations/sanic.py index 46250926ef..36e3b4c892 100644 --- a/sentry_sdk/integrations/sanic.py +++ b/sentry_sdk/integrations/sanic.py @@ -10,7 +10,6 @@ from sentry_sdk.integrations._wsgi_common import RequestExtractor, _filter_headers from sentry_sdk.integrations.logging import ignore_logger from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT, TRANSACTION_SOURCE_URL -from sentry_sdk.scope import Scope from sentry_sdk.utils import ( capture_internal_exceptions, ensure_integration_enabled, @@ -235,7 +234,7 @@ async def _set_transaction(request, route, **_): # type: (Request, Route, **Any) -> None if request.ctx._sentry_do_integration: with capture_internal_exceptions(): - scope = Scope.get_current_scope() + scope = sentry_sdk.get_current_scope() route_name = route.name.replace(request.app.name, "").strip(".") scope.set_transaction_name(route_name, source=TRANSACTION_SOURCE_COMPONENT) @@ -297,7 +296,7 @@ def _legacy_router_get(self, *args): rv = old_router_get(self, *args) if sentry_sdk.get_client().get_integration(SanicIntegration) is not None: with capture_internal_exceptions(): - scope = Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() if SanicIntegration.version and SanicIntegration.version >= (21, 3): # Sanic versions above and 
including 21.3 append the app name to the # route name, and so we need to remove it from Route name so the diff --git a/sentry_sdk/integrations/spark/spark_driver.py b/sentry_sdk/integrations/spark/spark_driver.py index 4c7f694ec0..b55550cbef 100644 --- a/sentry_sdk/integrations/spark/spark_driver.py +++ b/sentry_sdk/integrations/spark/spark_driver.py @@ -1,6 +1,5 @@ import sentry_sdk from sentry_sdk.integrations import Integration -from sentry_sdk.scope import Scope from sentry_sdk.utils import capture_internal_exceptions, ensure_integration_enabled from sentry_sdk._types import TYPE_CHECKING @@ -63,7 +62,7 @@ def _sentry_patched_spark_context_init(self, *args, **kwargs): _start_sentry_listener(self) _set_app_properties() - scope = Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() @scope.add_event_processor def process_event(event, hint): diff --git a/sentry_sdk/integrations/spark/spark_worker.py b/sentry_sdk/integrations/spark/spark_worker.py index fa18896516..d9e598603e 100644 --- a/sentry_sdk/integrations/spark/spark_worker.py +++ b/sentry_sdk/integrations/spark/spark_worker.py @@ -2,7 +2,6 @@ import sentry_sdk from sentry_sdk.integrations import Integration -from sentry_sdk.scope import Scope from sentry_sdk.utils import ( capture_internal_exceptions, exc_info_from_error, @@ -65,7 +64,7 @@ def _tag_task_context(): # type: () -> None from pyspark.taskcontext import TaskContext - scope = Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() @scope.add_event_processor def process_event(event, hint): diff --git a/sentry_sdk/integrations/starlette.py b/sentry_sdk/integrations/starlette.py index c417b834be..3b7aa11a93 100644 --- a/sentry_sdk/integrations/starlette.py +++ b/sentry_sdk/integrations/starlette.py @@ -12,7 +12,7 @@ request_body_within_bounds, ) from sentry_sdk.integrations.asgi import SentryAsgiMiddleware -from sentry_sdk.scope import Scope, should_send_default_pii +from sentry_sdk.scope import should_send_default_pii from sentry_sdk.tracing import ( SOURCE_FOR_STYLE, TRANSACTION_SOURCE_COMPONENT, @@ -124,7 +124,7 @@ async def _create_span_call(app, scope, receive, send, **kwargs): # Update transaction name with middleware name name, source = _get_transaction_from_middleware(app, scope, integration) if name is not None: - Scope.get_current_scope().set_transaction_name( + sentry_sdk.get_current_scope().set_transaction_name( name, source=source, ) @@ -298,7 +298,7 @@ def _add_user_to_sentry_scope(scope): if email: user_info.setdefault("email", starlette_user.email) - sentry_scope = Scope.get_isolation_scope() + sentry_scope = sentry_sdk.get_isolation_scope() sentry_scope.user = user_info @@ -410,10 +410,12 @@ async def _sentry_async_func(*args, **kwargs): request = args[0] _set_transaction_name_and_source( - Scope.get_current_scope(), integration.transaction_style, request + sentry_sdk.get_current_scope(), + integration.transaction_style, + request, ) - sentry_scope = Scope.get_isolation_scope() + sentry_scope = sentry_sdk.get_isolation_scope() extractor = StarletteRequestExtractor(request) info = await extractor.extract_request_info() @@ -452,7 +454,7 @@ def _sentry_sync_func(*args, **kwargs): integration = sentry_sdk.get_client().get_integration( StarletteIntegration ) - sentry_scope = Scope.get_isolation_scope() + sentry_scope = sentry_sdk.get_isolation_scope() if sentry_scope.profile is not None: sentry_scope.profile.update_active_thread_id() @@ -521,7 +523,9 @@ def _sentry_jinja2templates_init(self, *args, **kwargs): # type: 
(Jinja2Templates, *Any, **Any) -> None def add_sentry_trace_meta(request): # type: (Request) -> Dict[str, Any] - trace_meta = Markup(Scope.get_current_scope().trace_propagation_meta()) + trace_meta = Markup( + sentry_sdk.get_current_scope().trace_propagation_meta() + ) return { "sentry_trace_meta": trace_meta, } @@ -655,7 +659,7 @@ def _transaction_name_from_router(scope): def _set_transaction_name_and_source(scope, transaction_style, request): - # type: (Scope, str, Any) -> None + # type: (sentry_sdk.Scope, str, Any) -> None name = None source = SOURCE_FOR_STYLE[transaction_style] diff --git a/sentry_sdk/integrations/starlite.py b/sentry_sdk/integrations/starlite.py index 9ff5045d6c..07259563e0 100644 --- a/sentry_sdk/integrations/starlite.py +++ b/sentry_sdk/integrations/starlite.py @@ -4,7 +4,7 @@ from sentry_sdk.consts import OP from sentry_sdk.integrations import DidNotEnable, Integration from sentry_sdk.integrations.asgi import SentryAsgiMiddleware -from sentry_sdk.scope import Scope as SentryScope, should_send_default_pii +from sentry_sdk.scope import should_send_default_pii from sentry_sdk.tracing import SOURCE_FOR_STYLE, TRANSACTION_SOURCE_ROUTE from sentry_sdk.utils import ( ensure_integration_enabled, @@ -190,7 +190,7 @@ async def handle_wrapper( if sentry_sdk.get_client().get_integration(StarliteIntegration) is None: return await old_handle(self, scope, receive, send) - sentry_scope = SentryScope.get_isolation_scope() + sentry_scope = sentry_sdk.get_isolation_scope() request: "Request[Any, Any]" = scope["app"].request_class( scope=scope, receive=receive, send=send ) @@ -268,7 +268,7 @@ def exception_handler(exc: Exception, scope: "StarliteScope", _: "State") -> Non if should_send_default_pii(): user_info = retrieve_user_from_scope(scope) if user_info and isinstance(user_info, dict): - sentry_scope = SentryScope.get_isolation_scope() + sentry_scope = sentry_sdk.get_isolation_scope() sentry_scope.set_user(user_info) event, hint = event_from_exception( diff --git a/sentry_sdk/integrations/stdlib.py b/sentry_sdk/integrations/stdlib.py index e0b4d06794..ad8e965a4a 100644 --- a/sentry_sdk/integrations/stdlib.py +++ b/sentry_sdk/integrations/stdlib.py @@ -7,7 +7,7 @@ import sentry_sdk from sentry_sdk.consts import OP, SPANDATA from sentry_sdk.integrations import Integration -from sentry_sdk.scope import Scope, add_global_event_processor +from sentry_sdk.scope import add_global_event_processor from sentry_sdk.tracing_utils import EnvironHeaders, should_propagate_trace from sentry_sdk.utils import ( SENSITIVE_DATA_SUBSTITUTE, @@ -102,7 +102,10 @@ def putrequest(self, method, url, *args, **kwargs): rv = real_putrequest(self, method, url, *args, **kwargs) if should_propagate_trace(client, real_url): - for key, value in Scope.get_current_scope().iter_trace_propagation_headers( + for ( + key, + value, + ) in sentry_sdk.get_current_scope().iter_trace_propagation_headers( span=span ): logger.debug( @@ -202,7 +205,7 @@ def sentry_patched_popen_init(self, *a, **kw): description=description, origin="auto.subprocess.stdlib.subprocess", ) as span: - for k, v in Scope.get_current_scope().iter_trace_propagation_headers( + for k, v in sentry_sdk.get_current_scope().iter_trace_propagation_headers( span=span ): if env is None: diff --git a/sentry_sdk/integrations/strawberry.py b/sentry_sdk/integrations/strawberry.py index 326dd37fd6..148edac334 100644 --- a/sentry_sdk/integrations/strawberry.py +++ b/sentry_sdk/integrations/strawberry.py @@ -5,7 +5,7 @@ from sentry_sdk.consts import OP from 
sentry_sdk.integrations import Integration, DidNotEnable from sentry_sdk.integrations.logging import ignore_logger -from sentry_sdk.scope import Scope, should_send_default_pii +from sentry_sdk.scope import should_send_default_pii from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT from sentry_sdk.utils import ( capture_internal_exceptions, @@ -297,7 +297,7 @@ async def _sentry_patched_execute_async(*args, **kwargs): return result if "execution_context" in kwargs and result.errors: - scope = Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() event_processor = _make_request_event_processor(kwargs["execution_context"]) scope.add_event_processor(event_processor) @@ -309,7 +309,7 @@ def _sentry_patched_execute_sync(*args, **kwargs): result = old_execute_sync(*args, **kwargs) if "execution_context" in kwargs and result.errors: - scope = Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() event_processor = _make_request_event_processor(kwargs["execution_context"]) scope.add_event_processor(event_processor) @@ -340,7 +340,7 @@ def _sentry_patched_handle_errors(self, errors, response_data): if not errors: return - scope = Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() event_processor = _make_response_event_processor(response_data) scope.add_event_processor(event_processor) diff --git a/sentry_sdk/integrations/threading.py b/sentry_sdk/integrations/threading.py index 63b6e13846..6dd6acbae1 100644 --- a/sentry_sdk/integrations/threading.py +++ b/sentry_sdk/integrations/threading.py @@ -5,7 +5,7 @@ import sentry_sdk from sentry_sdk._types import TYPE_CHECKING from sentry_sdk.integrations import Integration -from sentry_sdk.scope import Scope, use_isolation_scope, use_scope +from sentry_sdk.scope import use_isolation_scope, use_scope from sentry_sdk.utils import ( ensure_integration_enabled, event_from_exception, @@ -55,8 +55,8 @@ def sentry_start(self, *a, **kw): # type: (Thread, *Any, **Any) -> Any integration = sentry_sdk.get_client().get_integration(ThreadingIntegration) if integration.propagate_scope: - isolation_scope = sentry_sdk.Scope.get_isolation_scope() - current_scope = sentry_sdk.Scope.get_current_scope() + isolation_scope = sentry_sdk.get_isolation_scope() + current_scope = sentry_sdk.get_current_scope() else: isolation_scope = None current_scope = None @@ -81,7 +81,7 @@ def sentry_start(self, *a, **kw): def _wrap_run(isolation_scope_to_use, current_scope_to_use, old_run_func): - # type: (Optional[Scope], Optional[Scope], F) -> F + # type: (Optional[sentry_sdk.Scope], Optional[sentry_sdk.Scope], F) -> F @wraps(old_run_func) def run(*a, **kw): # type: (*Any, **Any) -> Any diff --git a/sentry_sdk/metrics.py b/sentry_sdk/metrics.py index dfc1d89734..452bb61658 100644 --- a/sentry_sdk/metrics.py +++ b/sentry_sdk/metrics.py @@ -738,7 +738,7 @@ def _get_aggregator_and_update_tags(key, value, unit, tags): updated_tags.setdefault("release", client.options["release"]) updated_tags.setdefault("environment", client.options["environment"]) - scope = sentry_sdk.Scope.get_current_scope() + scope = sentry_sdk.get_current_scope() local_aggregator = None # We go with the low-level API here to access transaction information as diff --git a/sentry_sdk/profiler/transaction_profiler.py b/sentry_sdk/profiler/transaction_profiler.py index e8ebfa6450..6ed983fb59 100644 --- a/sentry_sdk/profiler/transaction_profiler.py +++ b/sentry_sdk/profiler/transaction_profiler.py @@ -288,7 +288,7 @@ def _set_initial_sampling_decision(self, 
sampling_context): self.sampled = False return - client = sentry_sdk.Scope.get_client() + client = sentry_sdk.get_client() if not client.is_active(): self.sampled = False return @@ -356,7 +356,7 @@ def stop(self): def __enter__(self): # type: () -> Profile - scope = sentry_sdk.scope.Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() old_profile = scope.profile scope.profile = self @@ -492,7 +492,7 @@ def to_json(self, event_opt, options): def valid(self): # type: () -> bool - client = sentry_sdk.Scope.get_client() + client = sentry_sdk.get_client() if not client.is_active(): return False diff --git a/sentry_sdk/scope.py b/sentry_sdk/scope.py index 233dfa25f2..ac47445e17 100644 --- a/sentry_sdk/scope.py +++ b/sentry_sdk/scope.py @@ -28,6 +28,7 @@ ) from sentry_sdk._types import TYPE_CHECKING from sentry_sdk.utils import ( + capture_internal_exception, capture_internal_exceptions, ContextVar, event_from_exception, @@ -497,7 +498,7 @@ def get_traceparent(self, *args, **kwargs): Returns the Sentry "sentry-trace" header (aka the traceparent) from the currently active span or the scopes Propagation Context. """ - client = Scope.get_client() + client = self.get_client() # If we have an active span, return traceparent from there if has_tracing_enabled(client.options) and self.span is not None: @@ -512,7 +513,7 @@ def get_traceparent(self, *args, **kwargs): return traceparent # Fall back to isolation scope's traceparent. It always has one - return Scope.get_isolation_scope().get_traceparent() + return self.get_isolation_scope().get_traceparent() def get_baggage(self, *args, **kwargs): # type: (Any, Any) -> Optional[Baggage] @@ -520,7 +521,7 @@ def get_baggage(self, *args, **kwargs): Returns the Sentry "baggage" header containing trace information from the currently active span or the scopes Propagation Context. """ - client = Scope.get_client() + client = self.get_client() # If we have an active span, return baggage from there if has_tracing_enabled(client.options) and self.span is not None: @@ -537,7 +538,7 @@ def get_baggage(self, *args, **kwargs): return Baggage(dynamic_sampling_context) # Fall back to isolation scope's baggage. It always has one - return Scope.get_isolation_scope().get_baggage() + return self.get_isolation_scope().get_baggage() def get_trace_context(self): # type: () -> Any @@ -609,7 +610,7 @@ def iter_trace_propagation_headers(self, *args, **kwargs): If a span is given, the trace data will taken from the span. If no span is given, the trace data is taken from the scope. 
""" - client = Scope.get_client() + client = self.get_client() if not client.options.get("propagate_traces"): return @@ -627,13 +628,13 @@ def iter_trace_propagation_headers(self, *args, **kwargs): yield header else: # otherwise try headers from current scope - current_scope = Scope.get_current_scope() + current_scope = self.get_current_scope() if current_scope._propagation_context is not None: for header in current_scope.iter_headers(): yield header else: # otherwise fall back to headers from isolation scope - isolation_scope = Scope.get_isolation_scope() + isolation_scope = self.get_isolation_scope() if isolation_scope._propagation_context is not None: for header in isolation_scope.iter_headers(): yield header @@ -643,11 +644,11 @@ def get_active_propagation_context(self): if self._propagation_context is not None: return self._propagation_context - current_scope = Scope.get_current_scope() + current_scope = self.get_current_scope() if current_scope._propagation_context is not None: return current_scope._propagation_context - isolation_scope = Scope.get_isolation_scope() + isolation_scope = self.get_isolation_scope() if isolation_scope._propagation_context is not None: return isolation_scope._propagation_context @@ -779,7 +780,7 @@ def set_user(self, value): # type: (Optional[Dict[str, Any]]) -> None """Sets a user for the scope.""" self._user = value - session = Scope.get_isolation_scope()._session + session = self.get_isolation_scope()._session if session is not None: session.update(user=value) @@ -893,14 +894,17 @@ def clear_breadcrumbs(self): def add_attachment( self, - bytes=None, # type: Optional[bytes] + bytes=None, # type: Union[None, bytes, Callable[[], bytes]] filename=None, # type: Optional[str] path=None, # type: Optional[str] content_type=None, # type: Optional[str] add_to_transactions=False, # type: bool ): # type: (...) -> None - """Adds an attachment to future events sent.""" + """Adds an attachment to future events sent from this scope. + + The parameters are the same as for the :py:class:`sentry_sdk.attachments.Attachment` constructor. + """ self._attachments.append( Attachment( bytes=bytes, @@ -921,7 +925,7 @@ def add_breadcrumb(self, crumb=None, hint=None, **kwargs): :param hint: An optional value that can be used by `before_breadcrumb` to customize the breadcrumbs that are emitted. 
""" - client = Scope.get_client() + client = self.get_client() if not client.is_active(): logger.info("Dropped breadcrumb because no client bound") @@ -990,7 +994,7 @@ def start_transaction( """ kwargs.setdefault("scope", self) - client = Scope.get_client() + client = self.get_client() try_autostart_continuous_profiler() @@ -1053,7 +1057,7 @@ def start_span(self, **kwargs): kwargs.setdefault("scope", self) # get current span or transaction - span = self.span or Scope.get_isolation_scope().span + span = self.span or self.get_isolation_scope().span if span is None: # New spans get the `trace_id` from the scope @@ -1110,7 +1114,7 @@ def capture_event(self, event, hint=None, scope=None, **scope_kwargs): """ scope = self._merge_scopes(scope, scope_kwargs) - event_id = Scope.get_client().capture_event(event=event, hint=hint, scope=scope) + event_id = self.get_client().capture_event(event=event, hint=hint, scope=scope) if event_id is not None and event.get("type") != "transaction": self.get_isolation_scope()._last_event_id = event_id @@ -1166,27 +1170,16 @@ def capture_exception(self, error=None, scope=None, **scope_kwargs): exc_info = sys.exc_info() event, hint = event_from_exception( - exc_info, client_options=Scope.get_client().options + exc_info, client_options=self.get_client().options ) try: return self.capture_event(event, hint=hint, scope=scope, **scope_kwargs) except Exception: - self._capture_internal_exception(sys.exc_info()) + capture_internal_exception(sys.exc_info()) return None - @staticmethod - def _capture_internal_exception(exc_info): - # type: (ExcInfo) -> None - """ - Capture an exception that is likely caused by a bug in the SDK - itself. - - These exceptions do not end up in Sentry and are just logged instead. - """ - logger.error("Internal error in sentry_sdk", exc_info=exc_info) - def start_session(self, *args, **kwargs): # type: (*Any, **Any) -> None """Starts a new session.""" @@ -1194,7 +1187,7 @@ def start_session(self, *args, **kwargs): self.end_session() - client = Scope.get_client() + client = self.get_client() self._session = Session( release=client.options.get("release"), environment=client.options.get("environment"), @@ -1210,7 +1203,7 @@ def end_session(self, *args, **kwargs): if session is not None: session.close() - Scope.get_client().capture_session(session) + self.get_client().capture_session(session) def stop_auto_session_tracking(self, *args, **kwargs): # type: (*Any, **Any) -> None @@ -1344,9 +1337,9 @@ def run_error_processors(self, event, hint): exc_info = hint.get("exc_info") if exc_info is not None: error_processors = chain( - Scope.get_global_scope()._error_processors, - Scope.get_isolation_scope()._error_processors, - Scope.get_current_scope()._error_processors, + self.get_global_scope()._error_processors, + self.get_isolation_scope()._error_processors, + self.get_current_scope()._error_processors, ) for error_processor in error_processors: diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 03ae08d23d..e8312828c1 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -358,7 +358,7 @@ def __repr__(self): def __enter__(self): # type: () -> Span - scope = self.scope or sentry_sdk.Scope.get_current_scope() + scope = self.scope or sentry_sdk.get_current_scope() old_span = scope.span scope.span = self self._context_manager_state = (scope, old_span) @@ -628,7 +628,7 @@ def finish(self, scope=None, end_timestamp=None): except AttributeError: self.timestamp = datetime.now(timezone.utc) - scope = scope or 
sentry_sdk.Scope.get_current_scope() + scope = scope or sentry_sdk.get_current_scope() maybe_create_breadcrumbs_from_span(scope, self) return None @@ -896,8 +896,8 @@ def finish( scope, hub ) # type: Optional[sentry_sdk.Scope] - scope = scope or self.scope or sentry_sdk.Scope.get_current_scope() - client = sentry_sdk.Scope.get_client() + scope = scope or self.scope or sentry_sdk.get_current_scope() + client = sentry_sdk.get_client() if not client.is_active(): # We have no active client and therefore nowhere to send this transaction. @@ -1056,7 +1056,7 @@ def _set_initial_sampling_decision(self, sampling_context): 4. If `traces_sampler` is not defined and there's no parent sampling decision, `traces_sample_rate` will be used. """ - client = sentry_sdk.Scope.get_client() + client = sentry_sdk.get_client() transaction_description = "{op}transaction <{name}>".format( op=("<" + self.op + "> " if self.op else ""), name=self.name diff --git a/sentry_sdk/tracing_utils.py b/sentry_sdk/tracing_utils.py index 4a50f50810..0dabfbc486 100644 --- a/sentry_sdk/tracing_utils.py +++ b/sentry_sdk/tracing_utils.py @@ -524,7 +524,7 @@ def populate_from_transaction(cls, transaction): Populate fresh baggage entry with sentry_items and make it immutable if this is the head SDK which originates traces. """ - client = sentry_sdk.Scope.get_client() + client = sentry_sdk.get_client() sentry_items = {} # type: Dict[str, str] if not client.is_active(): @@ -691,7 +691,7 @@ def get_current_span(scope=None): """ Returns the currently active span if there is one running, otherwise `None` """ - scope = scope or sentry_sdk.Scope.get_current_scope() + scope = scope or sentry_sdk.get_current_scope() current_span = scope.span return current_span diff --git a/sentry_sdk/utils.py b/sentry_sdk/utils.py index 8a805d3d64..862eedae9c 100644 --- a/sentry_sdk/utils.py +++ b/sentry_sdk/utils.py @@ -25,7 +25,6 @@ BaseExceptionGroup = None # type: ignore import sentry_sdk -import sentry_sdk.hub from sentry_sdk._compat import PY37 from sentry_sdk._types import TYPE_CHECKING from sentry_sdk.consts import DEFAULT_MAX_VALUE_LENGTH, EndpointType @@ -55,7 +54,6 @@ from gevent.hub import Hub - import sentry_sdk.integrations from sentry_sdk._types import Event, ExcInfo P = ParamSpec("P") @@ -191,8 +189,14 @@ def capture_internal_exceptions(): def capture_internal_exception(exc_info): # type: (ExcInfo) -> None + """ + Capture an exception that is likely caused by a bug in the SDK + itself. + + These exceptions do not end up in Sentry and are just logged instead. 
+ """ if sentry_sdk.get_client().is_active(): - sentry_sdk.Scope._capture_internal_exception(exc_info) + logger.error("Internal error in sentry_sdk", exc_info=exc_info) def to_timestamp(value): diff --git a/setup.py b/setup.py index f419737d36..09b5cb803e 100644 --- a/setup.py +++ b/setup.py @@ -21,7 +21,7 @@ def get_file_text(file_name): setup( name="sentry-sdk", - version="2.10.0", + version="2.11.0", author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", @@ -65,61 +65,7 @@ def get_file_text(file_name): "loguru": ["loguru>=0.5"], "openai": ["openai>=1.0.0", "tiktoken>=0.3.0"], "opentelemetry": ["opentelemetry-distro>=0.35b0"], - "opentelemetry-experimental": [ - # There's an umbrella package called - # opentelemetry-contrib-instrumentations that installs all - # available instrumentation packages, however it's broken in recent - # versions (after 0.41b0), see - # https://github.com/open-telemetry/opentelemetry-python-contrib/issues/2053 - "opentelemetry-instrumentation-aio-pika==0.46b0", - "opentelemetry-instrumentation-aiohttp-client==0.46b0", - # "opentelemetry-instrumentation-aiohttp-server==0.46b0", # broken package - "opentelemetry-instrumentation-aiopg==0.46b0", - "opentelemetry-instrumentation-asgi==0.46b0", - "opentelemetry-instrumentation-asyncio==0.46b0", - "opentelemetry-instrumentation-asyncpg==0.46b0", - "opentelemetry-instrumentation-aws-lambda==0.46b0", - "opentelemetry-instrumentation-boto==0.46b0", - "opentelemetry-instrumentation-boto3sqs==0.46b0", - "opentelemetry-instrumentation-botocore==0.46b0", - "opentelemetry-instrumentation-cassandra==0.46b0", - "opentelemetry-instrumentation-celery==0.46b0", - "opentelemetry-instrumentation-confluent-kafka==0.46b0", - "opentelemetry-instrumentation-dbapi==0.46b0", - "opentelemetry-instrumentation-django==0.46b0", - "opentelemetry-instrumentation-elasticsearch==0.46b0", - "opentelemetry-instrumentation-falcon==0.46b0", - "opentelemetry-instrumentation-fastapi==0.46b0", - "opentelemetry-instrumentation-flask==0.46b0", - "opentelemetry-instrumentation-grpc==0.46b0", - "opentelemetry-instrumentation-httpx==0.46b0", - "opentelemetry-instrumentation-jinja2==0.46b0", - "opentelemetry-instrumentation-kafka-python==0.46b0", - "opentelemetry-instrumentation-logging==0.46b0", - "opentelemetry-instrumentation-mysql==0.46b0", - "opentelemetry-instrumentation-mysqlclient==0.46b0", - "opentelemetry-instrumentation-pika==0.46b0", - "opentelemetry-instrumentation-psycopg==0.46b0", - "opentelemetry-instrumentation-psycopg2==0.46b0", - "opentelemetry-instrumentation-pymemcache==0.46b0", - "opentelemetry-instrumentation-pymongo==0.46b0", - "opentelemetry-instrumentation-pymysql==0.46b0", - "opentelemetry-instrumentation-pyramid==0.46b0", - "opentelemetry-instrumentation-redis==0.46b0", - "opentelemetry-instrumentation-remoulade==0.46b0", - "opentelemetry-instrumentation-requests==0.46b0", - "opentelemetry-instrumentation-sklearn==0.46b0", - "opentelemetry-instrumentation-sqlalchemy==0.46b0", - "opentelemetry-instrumentation-sqlite3==0.46b0", - "opentelemetry-instrumentation-starlette==0.46b0", - "opentelemetry-instrumentation-system-metrics==0.46b0", - "opentelemetry-instrumentation-threading==0.46b0", - "opentelemetry-instrumentation-tornado==0.46b0", - "opentelemetry-instrumentation-tortoiseorm==0.46b0", - "opentelemetry-instrumentation-urllib==0.46b0", - "opentelemetry-instrumentation-urllib3==0.46b0", - "opentelemetry-instrumentation-wsgi==0.46b0", - ], + 
"opentelemetry-experimental": ["opentelemetry-distro"], "pure_eval": ["pure_eval", "executing", "asttokens"], "pymongo": ["pymongo>=3.1"], "pyspark": ["pyspark>=2.4.4"], diff --git a/tests/conftest.py b/tests/conftest.py index 52e0c75c5c..c31a394fb5 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -21,9 +21,11 @@ eventlet = None import sentry_sdk +import sentry_sdk.utils from sentry_sdk.envelope import Envelope from sentry_sdk.integrations import ( # noqa: F401 _DEFAULT_INTEGRATIONS, + _installed_integrations, _processed_integrations, ) from sentry_sdk.profiler import teardown_profiler @@ -74,12 +76,11 @@ def clean_scopes(): @pytest.fixture(autouse=True) -def internal_exceptions(request, monkeypatch): +def internal_exceptions(request): errors = [] if "tests_internal_exceptions" in request.keywords: return - @staticmethod def _capture_internal_exception(exc_info): errors.append(exc_info) @@ -90,9 +91,7 @@ def _(): for e in errors: reraise(*e) - monkeypatch.setattr( - sentry_sdk.Scope, "_capture_internal_exception", _capture_internal_exception - ) + sentry_sdk.utils.capture_internal_exception = _capture_internal_exception return errors @@ -182,6 +181,7 @@ def reset_integrations(): except ValueError: pass _processed_integrations.clear() + _installed_integrations.clear() @pytest.fixture @@ -189,7 +189,7 @@ def sentry_init(request): def inner(*a, **kw): kw.setdefault("transport", TestTransport()) client = sentry_sdk.Client(*a, **kw) - sentry_sdk.Scope.get_global_scope().set_client(client) + sentry_sdk.get_global_scope().set_client(client) if request.node.get_closest_marker("forked"): # Do not run isolation if the test is already running in @@ -197,12 +197,12 @@ def inner(*a, **kw): # fork) yield inner else: - old_client = sentry_sdk.Scope.get_global_scope().client + old_client = sentry_sdk.get_global_scope().client try: - sentry_sdk.Scope.get_current_scope().set_client(None) + sentry_sdk.get_current_scope().set_client(None) yield inner finally: - sentry_sdk.Scope.get_global_scope().set_client(old_client) + sentry_sdk.get_global_scope().set_client(old_client) class TestTransport(Transport): diff --git a/tests/integrations/celery/test_celery.py b/tests/integrations/celery/test_celery.py index 117d52c81f..cc0bfd0390 100644 --- a/tests/integrations/celery/test_celery.py +++ b/tests/integrations/celery/test_celery.py @@ -6,7 +6,8 @@ from celery import Celery, VERSION from celery.bin import worker -from sentry_sdk import configure_scope, start_transaction, get_current_span +import sentry_sdk +from sentry_sdk import start_transaction, get_current_span from sentry_sdk.integrations.celery import ( CeleryIntegration, _wrap_apply_async, @@ -154,30 +155,31 @@ def dummy_task(x, y): foo = 42 # noqa return x / y - with configure_scope() as scope: - celery_invocation(dummy_task, 1, 2) - _, expected_context = celery_invocation(dummy_task, 1, 0) + scope = sentry_sdk.get_isolation_scope() - (error_event,) = events + celery_invocation(dummy_task, 1, 2) + _, expected_context = celery_invocation(dummy_task, 1, 0) - assert ( - error_event["contexts"]["trace"]["trace_id"] - == scope._propagation_context.trace_id - ) - assert ( - error_event["contexts"]["trace"]["span_id"] - != scope._propagation_context.span_id - ) - assert error_event["transaction"] == "dummy_task" - assert "celery_task_id" in error_event["tags"] - assert error_event["extra"]["celery-job"] == dict( - task_name="dummy_task", **expected_context - ) + (error_event,) = events - (exception,) = error_event["exception"]["values"] - assert 
exception["type"] == "ZeroDivisionError" - assert exception["mechanism"]["type"] == "celery" - assert exception["stacktrace"]["frames"][0]["vars"]["foo"] == "42" + assert ( + error_event["contexts"]["trace"]["trace_id"] + == scope._propagation_context.trace_id + ) + assert ( + error_event["contexts"]["trace"]["span_id"] + != scope._propagation_context.span_id + ) + assert error_event["transaction"] == "dummy_task" + assert "celery_task_id" in error_event["tags"] + assert error_event["extra"]["celery-job"] == dict( + task_name="dummy_task", **expected_context + ) + + (exception,) = error_event["exception"]["values"] + assert exception["type"] == "ZeroDivisionError" + assert exception["mechanism"]["type"] == "celery" + assert exception["stacktrace"]["frames"][0]["vars"]["foo"] == "42" @pytest.mark.parametrize("task_fails", [True, False], ids=["error", "success"]) @@ -255,18 +257,14 @@ def test_no_stackoverflows(celery): @celery.task(name="dummy_task") def dummy_task(): - with configure_scope() as scope: - scope.set_tag("foo", "bar") - + sentry_sdk.get_isolation_scope().set_tag("foo", "bar") results.append(42) for _ in range(10000): dummy_task.delay() assert results == [42] * 10000 - - with configure_scope() as scope: - assert not scope._tags + assert not sentry_sdk.get_isolation_scope()._tags def test_simple_no_propagation(capture_events, init_celery): diff --git a/tests/integrations/celery/test_update_celery_task_headers.py b/tests/integrations/celery/test_update_celery_task_headers.py index 1680e54d80..705c00de58 100644 --- a/tests/integrations/celery/test_update_celery_task_headers.py +++ b/tests/integrations/celery/test_update_celery_task_headers.py @@ -139,7 +139,7 @@ def test_celery_trace_propagation_default(sentry_init, monitor_beat_tasks): headers = {} span = None - scope = sentry_sdk.Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() outgoing_headers = _update_celery_task_headers(headers, span, monitor_beat_tasks) @@ -175,7 +175,7 @@ def test_celery_trace_propagation_traces_sample_rate( headers = {} span = None - scope = sentry_sdk.Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() outgoing_headers = _update_celery_task_headers(headers, span, monitor_beat_tasks) @@ -211,7 +211,7 @@ def test_celery_trace_propagation_enable_tracing( headers = {} span = None - scope = sentry_sdk.Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() outgoing_headers = _update_celery_task_headers(headers, span, monitor_beat_tasks) diff --git a/tests/integrations/django/myapp/views.py b/tests/integrations/django/myapp/views.py index dcd630363b..c1950059fe 100644 --- a/tests/integrations/django/myapp/views.py +++ b/tests/integrations/django/myapp/views.py @@ -191,15 +191,13 @@ def template_test2(request, *args, **kwargs): @csrf_exempt def template_test3(request, *args, **kwargs): - from sentry_sdk import Scope - - traceparent = Scope.get_current_scope().get_traceparent() + traceparent = sentry_sdk.get_current_scope().get_traceparent() if traceparent is None: - traceparent = Scope.get_isolation_scope().get_traceparent() + traceparent = sentry_sdk.get_isolation_scope().get_traceparent() - baggage = Scope.get_current_scope().get_baggage() + baggage = sentry_sdk.get_current_scope().get_baggage() if baggage is None: - baggage = Scope.get_isolation_scope().get_baggage() + baggage = sentry_sdk.get_isolation_scope().get_baggage() capture_message(traceparent + "\n" + baggage.serialize()) return render(request, "trace_meta.html", {}) diff --git 
a/tests/integrations/django/test_basic.py b/tests/integrations/django/test_basic.py index 1505204f28..45c25595f3 100644 --- a/tests/integrations/django/test_basic.py +++ b/tests/integrations/django/test_basic.py @@ -16,13 +16,13 @@ except ImportError: from django.core.urlresolvers import reverse +import sentry_sdk from sentry_sdk._compat import PY310 from sentry_sdk import capture_message, capture_exception from sentry_sdk.consts import SPANDATA from sentry_sdk.integrations.django import DjangoIntegration, _set_db_data from sentry_sdk.integrations.django.signals_handlers import _get_receiver_name from sentry_sdk.integrations.executing import ExecutingIntegration -from sentry_sdk.scope import Scope from sentry_sdk.tracing import Span from tests.conftest import unpack_werkzeug_response from tests.integrations.django.myapp.wsgi import application @@ -342,7 +342,7 @@ def test_sql_queries(sentry_init, capture_events, with_integration): sql = connection.cursor() - Scope.get_isolation_scope().clear_breadcrumbs() + sentry_sdk.get_isolation_scope().clear_breadcrumbs() with pytest.raises(OperationalError): # table doesn't even exist @@ -376,7 +376,7 @@ def test_sql_dict_query_params(sentry_init, capture_events): sql = connections["postgres"].cursor() events = capture_events() - Scope.get_isolation_scope().clear_breadcrumbs() + sentry_sdk.get_isolation_scope().clear_breadcrumbs() with pytest.raises(ProgrammingError): sql.execute( @@ -441,7 +441,7 @@ def test_sql_psycopg2_string_composition(sentry_init, capture_events, query): sql = connections["postgres"].cursor() - Scope.get_isolation_scope().clear_breadcrumbs() + sentry_sdk.get_isolation_scope().clear_breadcrumbs() events = capture_events() @@ -474,7 +474,7 @@ def test_sql_psycopg2_placeholders(sentry_init, capture_events): sql = connections["postgres"].cursor() events = capture_events() - Scope.get_isolation_scope().clear_breadcrumbs() + sentry_sdk.get_isolation_scope().clear_breadcrumbs() with pytest.raises(DataError): names = ["foo", "bar"] diff --git a/tests/integrations/falcon/test_falcon.py b/tests/integrations/falcon/test_falcon.py index c88a95a531..0607d3fdeb 100644 --- a/tests/integrations/falcon/test_falcon.py +++ b/tests/integrations/falcon/test_falcon.py @@ -7,7 +7,6 @@ import sentry_sdk from sentry_sdk.integrations.falcon import FalconIntegration from sentry_sdk.integrations.logging import LoggingIntegration -from sentry_sdk.scope import Scope from sentry_sdk.utils import parse_version @@ -380,17 +379,17 @@ def test_does_not_leak_scope(sentry_init, capture_events): sentry_init(integrations=[FalconIntegration()]) events = capture_events() - Scope.get_isolation_scope().set_tag("request_data", False) + sentry_sdk.get_isolation_scope().set_tag("request_data", False) app = falcon.API() class Resource: def on_get(self, req, resp): - Scope.get_isolation_scope().set_tag("request_data", True) + sentry_sdk.get_isolation_scope().set_tag("request_data", True) def generator(): for row in range(1000): - assert Scope.get_isolation_scope()._tags["request_data"] + assert sentry_sdk.get_isolation_scope()._tags["request_data"] yield (str(row) + "\n").encode() @@ -404,7 +403,7 @@ def generator(): expected_response = "".join(str(row) + "\n" for row in range(1000)) assert response.text == expected_response assert not events - assert not Scope.get_isolation_scope()._tags["request_data"] + assert not sentry_sdk.get_isolation_scope()._tags["request_data"] @pytest.mark.skipif( diff --git a/tests/integrations/flask/test_flask.py 
b/tests/integrations/flask/test_flask.py index c35bf2acb5..03a3b0b9d0 100644 --- a/tests/integrations/flask/test_flask.py +++ b/tests/integrations/flask/test_flask.py @@ -28,7 +28,6 @@ capture_exception, ) from sentry_sdk.integrations.logging import LoggingIntegration -from sentry_sdk.scope import Scope from sentry_sdk.serializer import MAX_DATABAG_BREADTH @@ -278,7 +277,7 @@ def test_flask_session_tracking(sentry_init, capture_envelopes, app): @app.route("/") def index(): - Scope.get_isolation_scope().set_user({"ip_address": "1.2.3.4", "id": "42"}) + sentry_sdk.get_isolation_scope().set_user({"ip_address": "1.2.3.4", "id": "42"}) try: raise ValueError("stuff") except Exception: @@ -666,15 +665,15 @@ def test_does_not_leak_scope(sentry_init, capture_events, app): sentry_init(integrations=[flask_sentry.FlaskIntegration()]) events = capture_events() - Scope.get_isolation_scope().set_tag("request_data", False) + sentry_sdk.get_isolation_scope().set_tag("request_data", False) @app.route("/") def index(): - Scope.get_isolation_scope().set_tag("request_data", True) + sentry_sdk.get_isolation_scope().set_tag("request_data", True) def generate(): for row in range(1000): - assert Scope.get_isolation_scope()._tags["request_data"] + assert sentry_sdk.get_isolation_scope()._tags["request_data"] yield str(row) + "\n" @@ -685,7 +684,7 @@ def generate(): assert response.data.decode() == "".join(str(row) + "\n" for row in range(1000)) assert not events - assert not Scope.get_isolation_scope()._tags["request_data"] + assert not sentry_sdk.get_isolation_scope()._tags["request_data"] def test_scoped_test_client(sentry_init, app): diff --git a/tests/integrations/graphene/test_graphene.py b/tests/integrations/graphene/test_graphene.py index 02bc34a515..5d54bb49cb 100644 --- a/tests/integrations/graphene/test_graphene.py +++ b/tests/integrations/graphene/test_graphene.py @@ -3,6 +3,7 @@ from flask import Flask, request, jsonify from graphene import ObjectType, String, Schema +from sentry_sdk.consts import OP from sentry_sdk.integrations.fastapi import FastApiIntegration from sentry_sdk.integrations.flask import FlaskIntegration from sentry_sdk.integrations.graphene import GrapheneIntegration @@ -201,3 +202,82 @@ def graphql_server_sync(): client.post("/graphql", json=query) assert len(events) == 0 + + +def test_graphql_span_holds_query_information(sentry_init, capture_events): + sentry_init( + integrations=[GrapheneIntegration(), FlaskIntegration()], + enable_tracing=True, + default_integrations=False, + ) + events = capture_events() + + schema = Schema(query=Query) + + sync_app = Flask(__name__) + + @sync_app.route("/graphql", methods=["POST"]) + def graphql_server_sync(): + data = request.get_json() + result = schema.execute(data["query"], operation_name=data.get("operationName")) + return jsonify(result.data), 200 + + query = { + "query": "query GreetingQuery { hello }", + "operationName": "GreetingQuery", + } + client = sync_app.test_client() + client.post("/graphql", json=query) + + assert len(events) == 1 + + (event,) = events + assert len(event["spans"]) == 1 + + (span,) = event["spans"] + assert span["op"] == OP.GRAPHQL_QUERY + assert span["description"] == query["operationName"] + assert span["data"]["graphql.document"] == query["query"] + assert span["data"]["graphql.operation.name"] == query["operationName"] + assert span["data"]["graphql.operation.type"] == "query" + + +def test_breadcrumbs_hold_query_information_on_error(sentry_init, capture_events): + sentry_init( + integrations=[ + 
GrapheneIntegration(), + ], + default_integrations=False, + ) + events = capture_events() + + schema = Schema(query=Query) + + sync_app = Flask(__name__) + + @sync_app.route("/graphql", methods=["POST"]) + def graphql_server_sync(): + data = request.get_json() + result = schema.execute(data["query"], operation_name=data.get("operationName")) + return jsonify(result.data), 200 + + query = { + "query": "query ErrorQuery { goodbye }", + "operationName": "ErrorQuery", + } + client = sync_app.test_client() + client.post("/graphql", json=query) + + assert len(events) == 1 + + (event,) = events + assert len(event["breadcrumbs"]) == 1 + + breadcrumbs = event["breadcrumbs"]["values"] + assert len(breadcrumbs) == 1 + + (breadcrumb,) = breadcrumbs + assert breadcrumb["category"] == "graphql.operation" + assert breadcrumb["data"]["operation_name"] == query["operationName"] + assert breadcrumb["data"]["operation_type"] == "query" + assert breadcrumb["type"] == "default" diff --git a/tests/integrations/loguru/test_loguru.py b/tests/integrations/loguru/test_loguru.py index 98b8cb4dee..6030108de1 100644 --- a/tests/integrations/loguru/test_loguru.py +++ b/tests/integrations/loguru/test_loguru.py @@ -54,7 +54,7 @@ def test_just_log( if not created_event: assert not events - breadcrumbs = sentry_sdk.Scope.get_isolation_scope()._breadcrumbs + breadcrumbs = sentry_sdk.get_isolation_scope()._breadcrumbs if ( not disable_breadcrumbs and created_event is not None ): # not None == not TRACE or DEBUG level @@ -92,7 +92,7 @@ def test_breadcrumb_format(sentry_init, capture_events): logger.info("test") formatted_message = "test" - breadcrumbs = sentry_sdk.Scope.get_isolation_scope()._breadcrumbs + breadcrumbs = sentry_sdk.get_isolation_scope()._breadcrumbs (breadcrumb,) = breadcrumbs assert breadcrumb["message"] == formatted_message diff --git a/tests/integrations/opentelemetry/test_experimental.py b/tests/integrations/opentelemetry/test_experimental.py index 856858c599..8e4b703361 100644 --- a/tests/integrations/opentelemetry/test_experimental.py +++ b/tests/integrations/opentelemetry/test_experimental.py @@ -2,28 +2,6 @@ import pytest -try: - from flask import Flask - from fastapi import FastAPI -except ImportError: - pass - - -try: - import opentelemetry.instrumentation.asyncio # noqa: F401 - - # We actually expect all OTel instrumentation packages to be available, but - # for simplicity we just check for one here. 
- instrumentation_packages_installed = True -except ImportError: - instrumentation_packages_installed = False - - -needs_potel = pytest.mark.skipif( - not instrumentation_packages_installed, - reason="needs OTel instrumentor libraries installed", -) - @pytest.mark.forked def test_integration_enabled_if_option_is_on(sentry_init, reset_integrations): @@ -67,57 +45,3 @@ def test_integration_not_enabled_if_option_is_missing(sentry_init, reset_integra ): sentry_init() mocked_setup_once.assert_not_called() - - -@pytest.mark.forked -@needs_potel -def test_instrumentors_applied(sentry_init, reset_integrations): - flask_instrument_mock = MagicMock() - fastapi_instrument_mock = MagicMock() - - with patch( - "opentelemetry.instrumentation.flask.FlaskInstrumentor.instrument", - flask_instrument_mock, - ): - with patch( - "opentelemetry.instrumentation.fastapi.FastAPIInstrumentor.instrument", - fastapi_instrument_mock, - ): - sentry_init( - _experiments={ - "otel_powered_performance": True, - }, - ) - - flask_instrument_mock.assert_called_once() - fastapi_instrument_mock.assert_called_once() - - -@pytest.mark.forked -@needs_potel -def test_post_patching(sentry_init, reset_integrations): - assert not hasattr( - Flask(__name__), "_is_instrumented_by_opentelemetry" - ), "Flask is not patched at the start" - assert not hasattr( - FastAPI(), "_is_instrumented_by_opentelemetry" - ), "FastAPI is not patched at the start" - - sentry_init( - _experiments={ - "otel_powered_performance": True, - }, - ) - - flask = Flask(__name__) - fastapi = FastAPI() - - assert hasattr( - flask, "_is_instrumented_by_opentelemetry" - ), "Flask has been patched after init()" - assert flask._is_instrumented_by_opentelemetry is True - - assert hasattr( - fastapi, "_is_instrumented_by_opentelemetry" - ), "FastAPI has been patched after init()" - assert fastapi._is_instrumented_by_opentelemetry is True diff --git a/tests/integrations/opentelemetry/test_span_processor.py b/tests/integrations/opentelemetry/test_span_processor.py index 12b1e91ea4..218b0434e9 100644 --- a/tests/integrations/opentelemetry/test_span_processor.py +++ b/tests/integrations/opentelemetry/test_span_processor.py @@ -5,12 +5,12 @@ from opentelemetry.trace import SpanKind, SpanContext, Status, StatusCode +import sentry_sdk from sentry_sdk.integrations.opentelemetry.span_processor import ( SentrySpanProcessor, link_trace_context_to_error_event, ) from sentry_sdk.integrations.opentelemetry.utils import is_sentry_span -from sentry_sdk.scope import Scope from sentry_sdk.tracing import Span, Transaction from sentry_sdk.tracing_utils import extract_sentrytrace_data @@ -22,7 +22,7 @@ def test_is_sentry_span(): client = MagicMock() client.dsn = "https://1234567890abcdef@o123456.ingest.sentry.io/123456" - Scope.get_global_scope().set_client(client) + sentry_sdk.get_global_scope().set_client(client) assert not is_sentry_span(otel_span) @@ -282,7 +282,7 @@ def test_on_start_transaction(): fake_client = MagicMock() fake_client.dsn = "https://1234567890abcdef@o123456.ingest.sentry.io/123456" - Scope.get_global_scope().set_client(fake_client) + sentry_sdk.get_global_scope().set_client(fake_client) with mock.patch( "sentry_sdk.integrations.opentelemetry.span_processor.start_transaction", @@ -324,7 +324,7 @@ def test_on_start_child(): fake_client = MagicMock() fake_client.dsn = "https://1234567890abcdef@o123456.ingest.sentry.io/123456" - Scope.get_global_scope().set_client(fake_client) + sentry_sdk.get_global_scope().set_client(fake_client) fake_span = MagicMock() @@ -387,7 +387,7 
@@ def test_on_end_sentry_transaction(): otel_span.get_span_context.return_value = span_context fake_client = MagicMock() - Scope.get_global_scope().set_client(fake_client) + sentry_sdk.get_global_scope().set_client(fake_client) fake_sentry_span = MagicMock(spec=Transaction) fake_sentry_span.set_context = MagicMock() @@ -423,7 +423,7 @@ def test_on_end_sentry_span(): otel_span.get_span_context.return_value = span_context fake_client = MagicMock() - Scope.get_global_scope().set_client(fake_client) + sentry_sdk.get_global_scope().set_client(fake_client) fake_sentry_span = MagicMock(spec=Span) fake_sentry_span.set_context = MagicMock() @@ -448,7 +448,7 @@ def test_link_trace_context_to_error_event(): Test that the trace context is added to the error event. """ fake_client = MagicMock() - Scope.get_global_scope().set_client(fake_client) + sentry_sdk.get_global_scope().set_client(fake_client) span_id = "1234567890abcdef" trace_id = "1234567890abcdef1234567890abcdef" @@ -506,7 +506,7 @@ def test_pruning_old_spans_on_start(): fake_client = MagicMock() fake_client.options = {"debug": False} fake_client.dsn = "https://1234567890abcdef@o123456.ingest.sentry.io/123456" - Scope.get_global_scope().set_client(fake_client) + sentry_sdk.get_global_scope().set_client(fake_client) span_processor = SentrySpanProcessor() @@ -547,7 +547,7 @@ def test_pruning_old_spans_on_end(): otel_span.parent.span_id = int("abcdef1234567890", 16) fake_client = MagicMock() - Scope.get_global_scope().set_client(fake_client) + sentry_sdk.get_global_scope().set_client(fake_client) fake_sentry_span = MagicMock(spec=Span) fake_sentry_span.set_context = MagicMock() diff --git a/tests/integrations/quart/test_quart.py b/tests/integrations/quart/test_quart.py index d4b4c61d97..321f07e3c6 100644 --- a/tests/integrations/quart/test_quart.py +++ b/tests/integrations/quart/test_quart.py @@ -4,6 +4,7 @@ import pytest import pytest_asyncio +import sentry_sdk from sentry_sdk import ( set_tag, capture_message, @@ -11,7 +12,6 @@ ) from sentry_sdk.integrations.logging import LoggingIntegration import sentry_sdk.integrations.quart as quart_sentry -from sentry_sdk.scope import Scope from quart import Quart, Response, abort, stream_with_context from quart.views import View @@ -378,15 +378,15 @@ async def test_does_not_leak_scope(sentry_init, capture_events, app): sentry_init(integrations=[quart_sentry.QuartIntegration()]) events = capture_events() - Scope.get_isolation_scope().set_tag("request_data", False) + sentry_sdk.get_isolation_scope().set_tag("request_data", False) @app.route("/") async def index(): - Scope.get_isolation_scope().set_tag("request_data", True) + sentry_sdk.get_isolation_scope().set_tag("request_data", True) async def generate(): for row in range(1000): - assert Scope.get_isolation_scope()._tags["request_data"] + assert sentry_sdk.get_isolation_scope()._tags["request_data"] yield str(row) + "\n" @@ -398,7 +398,7 @@ async def generate(): str(row) + "\n" for row in range(1000) ) assert not events - assert not Scope.get_isolation_scope()._tags["request_data"] + assert not sentry_sdk.get_isolation_scope()._tags["request_data"] @pytest.mark.asyncio diff --git a/tests/integrations/rq/test_rq.py b/tests/integrations/rq/test_rq.py index 02db5eba8e..e445b588be 100644 --- a/tests/integrations/rq/test_rq.py +++ b/tests/integrations/rq/test_rq.py @@ -4,9 +4,9 @@ import rq from fakeredis import FakeStrictRedis +import sentry_sdk from sentry_sdk import start_transaction from sentry_sdk.integrations.rq import RqIntegration -from 
sentry_sdk.scope import Scope from sentry_sdk.utils import parse_version @@ -181,7 +181,7 @@ def test_tracing_disabled( queue = rq.Queue(connection=FakeStrictRedis()) worker = rq.SimpleWorker([queue], connection=queue.connection) - scope = Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() queue.enqueue(crashing_job, foo=None) worker.work(burst=True) diff --git a/tests/integrations/sanic/test_sanic.py b/tests/integrations/sanic/test_sanic.py index 574fd673bb..598bae0134 100644 --- a/tests/integrations/sanic/test_sanic.py +++ b/tests/integrations/sanic/test_sanic.py @@ -7,9 +7,9 @@ import pytest +import sentry_sdk from sentry_sdk import capture_message from sentry_sdk.integrations.sanic import SanicIntegration -from sentry_sdk.scope import Scope from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT, TRANSACTION_SOURCE_URL from sanic import Sanic, request, response, __version__ as SANIC_VERSION_RAW @@ -234,12 +234,12 @@ def test_concurrency(sentry_init, app): @app.route("/context-check/") async def context_check(request, i): - scope = Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() scope.set_tag("i", i) await asyncio.sleep(random.random()) - scope = Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() assert scope._tags["i"] == i return response.text("ok") @@ -329,7 +329,7 @@ async def runner(): else: asyncio.run(runner()) - scope = Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() assert not scope._tags diff --git a/tests/integrations/sqlalchemy/test_sqlalchemy.py b/tests/integrations/sqlalchemy/test_sqlalchemy.py index cedb542e93..2b95fe02d4 100644 --- a/tests/integrations/sqlalchemy/test_sqlalchemy.py +++ b/tests/integrations/sqlalchemy/test_sqlalchemy.py @@ -9,10 +9,10 @@ from sqlalchemy.orm import relationship, sessionmaker from sqlalchemy import text +import sentry_sdk from sentry_sdk import capture_message, start_transaction from sentry_sdk.consts import DEFAULT_MAX_VALUE_LENGTH, SPANDATA from sentry_sdk.integrations.sqlalchemy import SqlalchemyIntegration -from sentry_sdk.scope import Scope from sentry_sdk.serializer import MAX_EVENT_BYTES from sentry_sdk.tracing_utils import record_sql_queries from sentry_sdk.utils import json_dumps @@ -235,7 +235,7 @@ def test_large_event_not_truncated(sentry_init, capture_events): long_str = "x" * (DEFAULT_MAX_VALUE_LENGTH + 10) - scope = Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() @scope.add_event_processor def processor(event, hint): diff --git a/tests/integrations/threading/test_threading.py b/tests/integrations/threading/test_threading.py index 328d0708c4..2b6b280c1e 100644 --- a/tests/integrations/threading/test_threading.py +++ b/tests/integrations/threading/test_threading.py @@ -7,7 +7,6 @@ import sentry_sdk from sentry_sdk import capture_message from sentry_sdk.integrations.threading import ThreadingIntegration -from sentry_sdk.scope import Scope original_start = Thread.start original_run = Thread.run @@ -45,7 +44,7 @@ def test_propagates_hub(sentry_init, capture_events, propagate_hub): events = capture_events() def stage1(): - Scope.get_isolation_scope().set_tag("stage1", "true") + sentry_sdk.get_isolation_scope().set_tag("stage1", "true") t = Thread(target=stage2) t.start() diff --git a/tests/integrations/tornado/test_tornado.py b/tests/integrations/tornado/test_tornado.py index d379d3dae4..294f605f6a 100644 --- a/tests/integrations/tornado/test_tornado.py +++ b/tests/integrations/tornado/test_tornado.py @@ -2,9 +2,9 @@ 
import pytest +import sentry_sdk from sentry_sdk import start_transaction, capture_message from sentry_sdk.integrations.tornado import TornadoIntegration -from sentry_sdk.scope import Scope from tornado.web import RequestHandler, Application, HTTPError from tornado.testing import AsyncHTTPTestCase @@ -37,11 +37,11 @@ def bogustest(self): class CrashingHandler(RequestHandler): def get(self): - Scope.get_isolation_scope().set_tag("foo", "42") + sentry_sdk.get_isolation_scope().set_tag("foo", "42") 1 / 0 def post(self): - Scope.get_isolation_scope().set_tag("foo", "43") + sentry_sdk.get_isolation_scope().set_tag("foo", "43") 1 / 0 @@ -53,12 +53,12 @@ def get(self): class HelloHandler(RequestHandler): async def get(self): - Scope.get_isolation_scope().set_tag("foo", "42") + sentry_sdk.get_isolation_scope().set_tag("foo", "42") return b"hello" async def post(self): - Scope.get_isolation_scope().set_tag("foo", "43") + sentry_sdk.get_isolation_scope().set_tag("foo", "43") return b"hello" @@ -101,7 +101,7 @@ def test_basic(tornado_testcase, sentry_init, capture_events): ) assert event["transaction_info"] == {"source": "component"} - assert not Scope.get_isolation_scope()._tags + assert not sentry_sdk.get_isolation_scope()._tags @pytest.mark.parametrize( diff --git a/tests/test_ai_monitoring.py b/tests/test_ai_monitoring.py new file mode 100644 index 0000000000..5e7c7432fa --- /dev/null +++ b/tests/test_ai_monitoring.py @@ -0,0 +1,121 @@ +import pytest + +import sentry_sdk +from sentry_sdk.ai.monitoring import ai_track + + +def test_ai_track(sentry_init, capture_events): + sentry_init(traces_sample_rate=1.0) + events = capture_events() + + @ai_track("my tool") + def tool(**kwargs): + pass + + @ai_track("some test pipeline") + def pipeline(): + tool() + + with sentry_sdk.start_transaction(): + pipeline() + + transaction = events[0] + assert transaction["type"] == "transaction" + assert len(transaction["spans"]) == 2 + spans = transaction["spans"] + + ai_pipeline_span = spans[0] if spans[0]["op"] == "ai.pipeline" else spans[1] + ai_run_span = spans[0] if spans[0]["op"] == "ai.run" else spans[1] + + assert ai_pipeline_span["description"] == "some test pipeline" + assert ai_run_span["description"] == "my tool" + + +def test_ai_track_with_tags(sentry_init, capture_events): + sentry_init(traces_sample_rate=1.0) + events = capture_events() + + @ai_track("my tool") + def tool(**kwargs): + pass + + @ai_track("some test pipeline") + def pipeline(): + tool() + + with sentry_sdk.start_transaction(): + pipeline(sentry_tags={"user": "colin"}, sentry_data={"some_data": "value"}) + + transaction = events[0] + assert transaction["type"] == "transaction" + assert len(transaction["spans"]) == 2 + spans = transaction["spans"] + + ai_pipeline_span = spans[0] if spans[0]["op"] == "ai.pipeline" else spans[1] + ai_run_span = spans[0] if spans[0]["op"] == "ai.run" else spans[1] + + assert ai_pipeline_span["description"] == "some test pipeline" + print(ai_pipeline_span) + assert ai_pipeline_span["tags"]["user"] == "colin" + assert ai_pipeline_span["data"]["some_data"] == "value" + assert ai_run_span["description"] == "my tool" + + +@pytest.mark.asyncio +async def test_ai_track_async(sentry_init, capture_events): + sentry_init(traces_sample_rate=1.0) + events = capture_events() + + @ai_track("my async tool") + async def async_tool(**kwargs): + pass + + @ai_track("some async test pipeline") + async def async_pipeline(): + await async_tool() + + with sentry_sdk.start_transaction(): + await async_pipeline() + + transaction = 
events[0] + assert transaction["type"] == "transaction" + assert len(transaction["spans"]) == 2 + spans = transaction["spans"] + + ai_pipeline_span = spans[0] if spans[0]["op"] == "ai.pipeline" else spans[1] + ai_run_span = spans[0] if spans[0]["op"] == "ai.run" else spans[1] + + assert ai_pipeline_span["description"] == "some async test pipeline" + assert ai_run_span["description"] == "my async tool" + + +@pytest.mark.asyncio +async def test_ai_track_async_with_tags(sentry_init, capture_events): + sentry_init(traces_sample_rate=1.0) + events = capture_events() + + @ai_track("my async tool") + async def async_tool(**kwargs): + pass + + @ai_track("some async test pipeline") + async def async_pipeline(): + await async_tool() + + with sentry_sdk.start_transaction(): + await async_pipeline( + sentry_tags={"user": "czyber"}, sentry_data={"some_data": "value"} + ) + + transaction = events[0] + assert transaction["type"] == "transaction" + assert len(transaction["spans"]) == 2 + spans = transaction["spans"] + + ai_pipeline_span = spans[0] if spans[0]["op"] == "ai.pipeline" else spans[1] + ai_run_span = spans[0] if spans[0]["op"] == "ai.run" else spans[1] + + assert ai_pipeline_span["description"] == "some async test pipeline" + assert ai_pipeline_span["tags"]["user"] == "czyber" + assert ai_pipeline_span["data"]["some_data"] == "value" + assert ai_run_span["description"] == "my async tool" diff --git a/tests/test_api.py b/tests/test_api.py index a6c44260d7..ae194af7fd 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -11,10 +11,14 @@ is_initialized, start_transaction, set_tags, + configure_scope, + push_scope, + get_global_scope, + get_current_scope, + get_isolation_scope, ) from sentry_sdk.client import Client, NonRecordingClient -from sentry_sdk.scope import Scope @pytest.mark.forked @@ -33,7 +37,7 @@ def test_get_current_span_default_hub(sentry_init): assert get_current_span() is None - scope = Scope.get_current_scope() + scope = get_current_scope() fake_span = mock.MagicMock() scope.span = fake_span @@ -66,7 +70,7 @@ def test_traceparent_with_tracing_enabled(sentry_init): def test_traceparent_with_tracing_disabled(sentry_init): sentry_init() - propagation_context = Scope.get_isolation_scope()._propagation_context + propagation_context = get_isolation_scope()._propagation_context expected_traceparent = "%s-%s" % ( propagation_context.trace_id, propagation_context.span_id, @@ -77,7 +81,7 @@ def test_traceparent_with_tracing_disabled(sentry_init): @pytest.mark.forked def test_baggage_with_tracing_disabled(sentry_init): sentry_init(release="1.0.0", environment="dev") - propagation_context = Scope.get_isolation_scope()._propagation_context + propagation_context = get_isolation_scope()._propagation_context expected_baggage = ( "sentry-trace_id={},sentry-environment=dev,sentry-release=1.0.0".format( propagation_context.trace_id @@ -113,7 +117,7 @@ def test_continue_trace(sentry_init): with start_transaction(transaction): assert transaction.name == "some name" - propagation_context = Scope.get_isolation_scope()._propagation_context + propagation_context = get_isolation_scope()._propagation_context assert propagation_context.trace_id == transaction.trace_id == trace_id assert propagation_context.parent_span_id == parent_span_id assert propagation_context.parent_sampled == parent_sampled @@ -126,7 +130,7 @@ def test_continue_trace(sentry_init): def test_is_initialized(): assert not is_initialized() - scope = Scope.get_global_scope() + scope = get_global_scope() scope.set_client(Client()) assert 
is_initialized() @@ -179,3 +183,15 @@ def test_set_tags(sentry_init, capture_events): "tag2": "updated", "tag3": "new", }, "Updating tags with empty dict changed tags" + + +def test_configure_scope_deprecation(): + with pytest.warns(DeprecationWarning): + with configure_scope(): + ... + + +def test_push_scope_deprecation(): + with pytest.warns(DeprecationWarning): + with push_scope(): + ... diff --git a/tests/test_basics.py b/tests/test_basics.py index 2c31cfa3ae..cc4594d8ab 100644 --- a/tests/test_basics.py +++ b/tests/test_basics.py @@ -1,4 +1,5 @@ import datetime +import importlib import logging import os import sys @@ -7,14 +8,13 @@ import pytest from sentry_sdk.client import Client - from tests.conftest import patch_start_tracing_child import sentry_sdk import sentry_sdk.scope from sentry_sdk import ( + get_client, push_scope, - configure_scope, capture_event, capture_exception, capture_message, @@ -22,16 +22,18 @@ last_event_id, add_breadcrumb, isolation_scope, + new_scope, Hub, - Scope, ) from sentry_sdk.integrations import ( _AUTO_ENABLING_INTEGRATIONS, + _DEFAULT_INTEGRATIONS, Integration, setup_integrations, ) from sentry_sdk.integrations.logging import LoggingIntegration from sentry_sdk.integrations.redis import RedisIntegration +from sentry_sdk.integrations.stdlib import StdlibIntegration from sentry_sdk.scope import add_global_event_processor from sentry_sdk.utils import get_sdk_name, reraise from sentry_sdk.tracing_utils import has_tracing_enabled @@ -71,13 +73,11 @@ def test_processors(sentry_init, capture_events): sentry_init() events = capture_events() - with configure_scope() as scope: - - def error_processor(event, exc_info): - event["exception"]["values"][0]["value"] += " whatever" - return event + def error_processor(event, exc_info): + event["exception"]["values"][0]["value"] += " whatever" + return event - scope.add_error_processor(error_processor, ValueError) + sentry_sdk.get_isolation_scope().add_error_processor(error_processor, ValueError) try: raise ValueError("aha!") @@ -294,7 +294,7 @@ def before_breadcrumb(crumb, hint): add_breadcrumb(crumb=dict(foo=42)) -def test_push_scope(sentry_init, capture_events): +def test_push_scope(sentry_init, capture_events, suppress_deprecation_warnings): sentry_init() events = capture_events() @@ -311,7 +311,9 @@ def test_push_scope(sentry_init, capture_events): assert "exception" in event -def test_push_scope_null_client(sentry_init, capture_events): +def test_push_scope_null_client( + sentry_init, capture_events, suppress_deprecation_warnings +): """ This test can be removed when we remove push_scope and the Hub from the SDK. 
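A sketch of the replacement pattern behind the `push_scope`/`configure_scope` deprecations exercised above (the tag names and values are made up; `new_scope`, `get_isolation_scope`, and `set_tag` are the top-level helpers already used elsewhere in this series):

    import sentry_sdk

    # Instead of the deprecated push_scope()/configure_scope():
    with sentry_sdk.new_scope() as scope:  # forked scope, restored on exit
        scope.set_tag("request_id", "abc123")

    # Longer-lived, per-task state goes on the isolation scope:
    sentry_sdk.get_isolation_scope().set_tag("task", "cleanup")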
""" @@ -385,7 +387,7 @@ def test_breadcrumbs(sentry_init, capture_events): category="auth", message="Authenticated user %s" % i, level="info" ) - Scope.get_isolation_scope().clear() + sentry_sdk.get_isolation_scope().clear() capture_exception(ValueError()) (event,) = events @@ -429,9 +431,9 @@ def test_attachments(sentry_init, capture_envelopes): this_file = os.path.abspath(__file__.rstrip("c")) - with configure_scope() as scope: - scope.add_attachment(bytes=b"Hello World!", filename="message.txt") - scope.add_attachment(path=this_file) + scope = sentry_sdk.get_isolation_scope() + scope.add_attachment(bytes=b"Hello World!", filename="message.txt") + scope.add_attachment(path=this_file) capture_exception(ValueError()) @@ -456,6 +458,21 @@ def test_attachments(sentry_init, capture_envelopes): assert pyfile.payload.get_bytes() == f.read() +@pytest.mark.tests_internal_exceptions +def test_attachments_graceful_failure( + sentry_init, capture_envelopes, internal_exceptions +): + sentry_init() + envelopes = capture_envelopes() + + sentry_sdk.get_isolation_scope().add_attachment(path="non_existent") + capture_exception(ValueError()) + + (envelope,) = envelopes + assert len(envelope.items) == 2 + assert envelope.items[1].payload.get_bytes() == b"" + + def test_integration_scoping(sentry_init, capture_events): logger = logging.getLogger("test_basics") @@ -473,6 +490,51 @@ def test_integration_scoping(sentry_init, capture_events): assert not events +default_integrations = [ + getattr( + importlib.import_module(integration.rsplit(".", 1)[0]), + integration.rsplit(".", 1)[1], + ) + for integration in _DEFAULT_INTEGRATIONS +] + + +@pytest.mark.forked +@pytest.mark.parametrize( + "provided_integrations,default_integrations,disabled_integrations,expected_integrations", + [ + ([], False, None, set()), + ([], False, [], set()), + ([LoggingIntegration()], False, None, {LoggingIntegration}), + ([], True, None, set(default_integrations)), + ( + [], + True, + [LoggingIntegration(), StdlibIntegration], + set(default_integrations) - {LoggingIntegration, StdlibIntegration}, + ), + ], +) +def test_integrations( + sentry_init, + provided_integrations, + default_integrations, + disabled_integrations, + expected_integrations, + reset_integrations, +): + sentry_init( + integrations=provided_integrations, + default_integrations=default_integrations, + disabled_integrations=disabled_integrations, + auto_enabling_integrations=False, + debug=True, + ) + assert { + type(integration) for integration in get_client().integrations.values() + } == expected_integrations + + @pytest.mark.skip( reason="This test is not valid anymore, because with the new Scopes calling bind_client on the Hub sets the client on the global scope. 
This test should be removed once the Hub is removed" ) @@ -546,14 +608,14 @@ def before_send(event, hint): sentry_init(debug=True, before_send=before_send) events = capture_events() - with push_scope() as scope: + with new_scope() as scope: @scope.add_event_processor def foo(event, hint): event["message"] += "foo" return event - with push_scope() as scope: + with new_scope() as scope: @scope.add_event_processor def bar(event, hint): diff --git a/tests/test_client.py b/tests/test_client.py index 571912ab12..f6c2cec05c 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -21,6 +21,7 @@ capture_event, set_tag, ) +from sentry_sdk.utils import capture_internal_exception from sentry_sdk.integrations.executing import ExecutingIntegration from sentry_sdk.transport import Transport from sentry_sdk.serializer import MAX_DATABAG_BREADTH @@ -350,29 +351,24 @@ def test_simple_transport(sentry_init): def test_ignore_errors(sentry_init, capture_events): - with mock.patch( - "sentry_sdk.scope.Scope._capture_internal_exception" - ) as mock_capture_internal_exception: - - class MyDivisionError(ZeroDivisionError): - pass + sentry_init(ignore_errors=[ZeroDivisionError]) + events = capture_events() - sentry_init(ignore_errors=[ZeroDivisionError], transport=_TestTransport()) + class MyDivisionError(ZeroDivisionError): + pass - def e(exc): - try: - raise exc - except Exception: - capture_exception() + def e(exc): + try: + raise exc + except Exception: + capture_exception() - e(ZeroDivisionError()) - e(MyDivisionError()) - e(ValueError()) + e(ZeroDivisionError()) + e(MyDivisionError()) + e(ValueError()) - assert mock_capture_internal_exception.call_count == 1 - assert ( - mock_capture_internal_exception.call_args[0][0][0] == EnvelopeCapturedError - ) + assert len(events) == 1 + assert events[0]["exception"]["values"][0]["type"] == "ValueError" def test_include_local_variables_enabled(sentry_init, capture_events): @@ -570,7 +566,9 @@ def capture_envelope(self, envelope): assert output.count(b"HI") == num_messages -def test_configure_scope_available(sentry_init, request, monkeypatch): +def test_configure_scope_available( + sentry_init, request, monkeypatch, suppress_deprecation_warnings +): """ Test that scope is configured if client is configured @@ -597,9 +595,7 @@ def callback(scope): def test_client_debug_option_enabled(sentry_init, caplog): sentry_init(debug=True) - sentry_sdk.Scope.get_isolation_scope()._capture_internal_exception( - (ValueError, ValueError("OK"), None) - ) + capture_internal_exception((ValueError, ValueError("OK"), None)) assert "OK" in caplog.text @@ -609,9 +605,7 @@ def test_client_debug_option_disabled(with_client, sentry_init, caplog): if with_client: sentry_init() - sentry_sdk.Scope.get_isolation_scope()._capture_internal_exception( - (ValueError, ValueError("OK"), None) - ) + capture_internal_exception((ValueError, ValueError("OK"), None)) assert "OK" not in caplog.text @@ -686,14 +680,13 @@ def test_cyclic_data(sentry_init, capture_events): sentry_init() events = capture_events() - with configure_scope() as scope: - data = {} - data["is_cyclic"] = data + data = {} + data["is_cyclic"] = data - other_data = "" - data["not_cyclic"] = other_data - data["not_cyclic2"] = other_data - scope.set_extra("foo", data) + other_data = "" + data["not_cyclic"] = other_data + data["not_cyclic2"] = other_data + sentry_sdk.get_isolation_scope().set_extra("foo", data) capture_message("hi") (event,) = events @@ -1064,9 +1057,7 @@ def test_debug_option( else: sentry_init(debug=client_option) - 
sentry_sdk.Scope.get_isolation_scope()._capture_internal_exception( - (ValueError, ValueError("something is wrong"), None) - ) + capture_internal_exception((ValueError, ValueError("something is wrong"), None)) if debug_output_expected: assert "something is wrong" in caplog.text else: diff --git a/tests/test_metrics.py b/tests/test_metrics.py index a29a18b0cf..537f8a9646 100644 --- a/tests/test_metrics.py +++ b/tests/test_metrics.py @@ -6,7 +6,7 @@ import pytest import sentry_sdk -from sentry_sdk import Scope, metrics +from sentry_sdk import metrics from sentry_sdk.tracing import TRANSACTION_SOURCE_ROUTE from sentry_sdk.envelope import parse_json @@ -538,8 +538,9 @@ def test_transaction_name( ts = time.time() envelopes = capture_envelopes() - scope = Scope.get_current_scope() - scope.set_transaction_name("/user/{user_id}", source="route") + sentry_sdk.get_current_scope().set_transaction_name( + "/user/{user_id}", source="route" + ) metrics.distribution("dist", 1.0, tags={"a": "b"}, timestamp=ts) metrics.distribution("dist", 2.0, tags={"a": "b"}, timestamp=ts) metrics.distribution("dist", 2.0, tags={"a": "b"}, timestamp=ts) diff --git a/tests/test_sessions.py b/tests/test_sessions.py index 989bfeadd1..c10b9262ce 100644 --- a/tests/test_sessions.py +++ b/tests/test_sessions.py @@ -14,16 +14,16 @@ def test_basic(sentry_init, capture_envelopes): sentry_init(release="fun-release", environment="not-fun-env") envelopes = capture_envelopes() - sentry_sdk.Scope.get_isolation_scope().start_session() + sentry_sdk.get_isolation_scope().start_session() try: - scope = sentry_sdk.Scope.get_current_scope() + scope = sentry_sdk.get_current_scope() scope.set_user({"id": "42"}) raise Exception("all is wrong") except Exception: sentry_sdk.capture_exception() - sentry_sdk.Scope.get_isolation_scope().end_session() + sentry_sdk.get_isolation_scope().end_session() sentry_sdk.flush() assert len(envelopes) == 2 @@ -51,9 +51,9 @@ def test_aggregates(sentry_init, capture_envelopes): envelopes = capture_envelopes() with auto_session_tracking(session_mode="request"): - with sentry_sdk.push_scope(): + with sentry_sdk.new_scope() as scope: try: - scope = sentry_sdk.Scope.get_current_scope() + scope = sentry_sdk.get_current_scope() scope.set_user({"id": "42"}) raise Exception("all is wrong") except Exception: @@ -62,8 +62,8 @@ def test_aggregates(sentry_init, capture_envelopes): with auto_session_tracking(session_mode="request"): pass - sentry_sdk.Scope.get_isolation_scope().start_session(session_mode="request") - sentry_sdk.Scope.get_isolation_scope().end_session() + sentry_sdk.get_isolation_scope().start_session(session_mode="request") + sentry_sdk.get_isolation_scope().end_session() sentry_sdk.flush() assert len(envelopes) == 2 @@ -92,7 +92,7 @@ def test_aggregates_explicitly_disabled_session_tracking_request_mode( envelopes = capture_envelopes() with auto_session_tracking(session_mode="request"): - with sentry_sdk.push_scope(): + with sentry_sdk.new_scope(): try: raise Exception("all is wrong") except Exception: @@ -101,8 +101,8 @@ def test_aggregates_explicitly_disabled_session_tracking_request_mode( with auto_session_tracking(session_mode="request"): pass - sentry_sdk.Scope.get_isolation_scope().start_session(session_mode="request") - sentry_sdk.Scope.get_isolation_scope().end_session() + sentry_sdk.get_isolation_scope().start_session(session_mode="request") + sentry_sdk.get_isolation_scope().end_session() sentry_sdk.flush() sess = envelopes[1] @@ -127,7 +127,7 @@ def 
test_no_thread_on_shutdown_no_errors(sentry_init): side_effect=RuntimeError("can't create new thread at interpreter shutdown"), ): with auto_session_tracking(session_mode="request"): - with sentry_sdk.push_scope(): + with sentry_sdk.new_scope(): try: raise Exception("all is wrong") except Exception: @@ -136,6 +136,6 @@ def test_no_thread_on_shutdown_no_errors(sentry_init): with auto_session_tracking(session_mode="request"): pass - sentry_sdk.Scope.get_isolation_scope().start_session(session_mode="request") - sentry_sdk.Scope.get_isolation_scope().end_session() + sentry_sdk.get_isolation_scope().start_session(session_mode="request") + sentry_sdk.get_isolation_scope().end_session() sentry_sdk.flush() diff --git a/tests/test_transport.py b/tests/test_transport.py index 5fc81d6817..2e2ad3c4cd 100644 --- a/tests/test_transport.py +++ b/tests/test_transport.py @@ -12,9 +12,20 @@ from werkzeug.wrappers import Request, Response import sentry_sdk -from sentry_sdk import Client, add_breadcrumb, capture_message, Scope +from sentry_sdk import ( + Client, + add_breadcrumb, + capture_message, + isolation_scope, + get_isolation_scope, + Hub, +) from sentry_sdk.envelope import Envelope, Item, parse_json -from sentry_sdk.transport import KEEP_ALIVE_SOCKET_OPTIONS, _parse_rate_limits +from sentry_sdk.transport import ( + KEEP_ALIVE_SOCKET_OPTIONS, + _parse_rate_limits, + HttpTransport, +) from sentry_sdk.integrations.logging import LoggingIntegration, ignore_logger CapturedData = namedtuple("CapturedData", ["path", "event", "envelope", "compressed"]) @@ -128,8 +139,8 @@ def test_transport_works( if use_pickle: client = pickle.loads(pickle.dumps(client)) - sentry_sdk.Scope.get_global_scope().set_client(client) - request.addfinalizer(lambda: sentry_sdk.Scope.get_global_scope().set_client(None)) + sentry_sdk.get_global_scope().set_client(client) + request.addfinalizer(lambda: sentry_sdk.get_global_scope().set_client(None)) add_breadcrumb( level="info", message="i like bread", timestamp=datetime.now(timezone.utc) @@ -264,8 +275,8 @@ def test_transport_infinite_loop(capturing_server, request, make_client): # to an infinite loop ignore_logger("werkzeug") - sentry_sdk.Scope.get_global_scope().set_client(client) - with sentry_sdk.isolation_scope(): + sentry_sdk.get_global_scope().set_client(client) + with isolation_scope(): capture_message("hi") client.flush() @@ -280,8 +291,8 @@ def test_transport_no_thread_on_shutdown_no_errors(capturing_server, make_client "threading.Thread.start", side_effect=RuntimeError("can't create new thread at interpreter shutdown"), ): - sentry_sdk.Scope.get_global_scope().set_client(client) - with sentry_sdk.isolation_scope(): + sentry_sdk.get_global_scope().set_client(client) + with isolation_scope(): capture_message("hi") # nothing exploded but also no events can be sent anymore @@ -434,7 +445,7 @@ def intercepting_fetch(*args, **kwargs): client.transport._last_client_report_sent = 0 outcomes_enabled = True - scope = Scope() + scope = get_isolation_scope() scope.add_attachment(bytes=b"Hello World", filename="hello.txt") client.capture_event({"type": "error"}, scope=scope) client.flush() @@ -639,15 +650,15 @@ def test_metric_bucket_limits_with_all_namespaces( def test_hub_cls_backwards_compat(): - class TestCustomHubClass(sentry_sdk.Hub): + class TestCustomHubClass(Hub): pass - transport = sentry_sdk.transport.HttpTransport( + transport = HttpTransport( defaultdict(lambda: None, {"dsn": "https://123abc@example.com/123"}) ) with pytest.deprecated_call(): - assert transport.hub_cls is 
sentry_sdk.Hub + assert transport.hub_cls is Hub with pytest.deprecated_call(): transport.hub_cls = TestCustomHubClass diff --git a/tests/tracing/test_integration_tests.py b/tests/tracing/test_integration_tests.py index adab261745..47170af97b 100644 --- a/tests/tracing/test_integration_tests.py +++ b/tests/tracing/test_integration_tests.py @@ -4,9 +4,9 @@ import pytest import random +import sentry_sdk from sentry_sdk import ( capture_message, - Scope, start_span, start_transaction, ) @@ -66,7 +66,7 @@ def test_continue_from_headers(sentry_init, capture_envelopes, sampled, sample_r with start_span() as old_span: old_span.sampled = sampled headers = dict( - Scope.get_current_scope().iter_trace_propagation_headers(old_span) + sentry_sdk.get_current_scope().iter_trace_propagation_headers(old_span) ) headers["baggage"] = ( "other-vendor-value-1=foo;bar;baz, " @@ -101,7 +101,7 @@ def test_continue_from_headers(sentry_init, capture_envelopes, sampled, sample_r with start_transaction(child_transaction): # change the transaction name from "WRONG" to make sure the change # is reflected in the final data - Scope.get_current_scope().transaction = "ho" + sentry_sdk.get_current_scope().transaction = "ho" capture_message("hello") # in this case the child transaction won't be captured @@ -271,7 +271,7 @@ def test_trace_propagation_meta_head_sdk(sentry_init): with start_transaction(transaction): with start_span(op="foo", description="foodesc") as current_span: span = current_span - meta = Scope.get_current_scope().trace_propagation_meta() + meta = sentry_sdk.get_current_scope().trace_propagation_meta() ind = meta.find(">") + 1 sentry_trace, baggage = meta[:ind], meta[ind:] diff --git a/tests/tracing/test_misc.py b/tests/tracing/test_misc.py index fcfcf31b69..de25acd7d2 100644 --- a/tests/tracing/test_misc.py +++ b/tests/tracing/test_misc.py @@ -6,7 +6,7 @@ from unittest.mock import MagicMock import sentry_sdk -from sentry_sdk import Scope, start_span, start_transaction, set_measurement +from sentry_sdk import start_span, start_transaction, set_measurement from sentry_sdk.consts import MATCH_ALL from sentry_sdk.tracing import Span, Transaction from sentry_sdk.tracing_utils import should_propagate_trace @@ -84,7 +84,7 @@ def test_finds_transaction_on_scope(sentry_init): transaction = start_transaction(name="dogpark") - scope = Scope.get_current_scope() + scope = sentry_sdk.get_current_scope() # See note in Scope class re: getters and setters of the `transaction` # property. 
For the moment, assigning to scope.transaction merely sets the @@ -113,7 +113,7 @@ def test_finds_transaction_when_descendent_span_is_on_scope( transaction = start_transaction(name="dogpark") child_span = transaction.start_child(op="sniffing") - scope = Scope.get_current_scope() + scope = sentry_sdk.get_current_scope() scope._span = child_span # this is the same whether it's the transaction itself or one of its @@ -136,7 +136,7 @@ def test_finds_orphan_span_on_scope(sentry_init): span = start_span(op="sniffing") - scope = Scope.get_current_scope() + scope = sentry_sdk.get_current_scope() scope._span = span assert scope._span is not None @@ -150,7 +150,7 @@ def test_finds_non_orphan_span_on_scope(sentry_init): transaction = start_transaction(name="dogpark") child_span = transaction.start_child(op="sniffing") - scope = Scope.get_current_scope() + scope = sentry_sdk.get_current_scope() scope._span = child_span assert scope._span is not None @@ -357,7 +357,7 @@ def test_should_propagate_trace_to_sentry( def test_start_transaction_updates_scope_name_source(sentry_init): sentry_init(traces_sample_rate=1.0) - scope = Scope.get_current_scope() + scope = sentry_sdk.get_current_scope() with start_transaction(name="foobar", source="route"): assert scope._transaction == "foobar" diff --git a/tests/tracing/test_sampling.py b/tests/tracing/test_sampling.py index 491281fa67..2e6ed0dab3 100644 --- a/tests/tracing/test_sampling.py +++ b/tests/tracing/test_sampling.py @@ -4,7 +4,8 @@ import pytest -from sentry_sdk import Scope, start_span, start_transaction, capture_exception +import sentry_sdk +from sentry_sdk import start_span, start_transaction, capture_exception from sentry_sdk.tracing import Transaction from sentry_sdk.utils import logger @@ -56,7 +57,7 @@ def test_get_transaction_and_span_from_scope_regardless_of_sampling_decision( with start_transaction(name="/", sampled=sampling_decision): with start_span(op="child-span"): with start_span(op="child-child-span"): - scope = Scope.get_current_scope() + scope = sentry_sdk.get_current_scope() assert scope.span.op == "child-child-span" assert scope.transaction.name == "/" diff --git a/tox.ini b/tox.ini index 3ab1bae529..2b5ef6d8d2 100644 --- a/tox.ini +++ b/tox.ini @@ -396,7 +396,7 @@ deps = django-v4.1: Django~=4.1.0 django-v4.2: Django~=4.2.0 django-v5.0: Django~=5.0.0 - django-v5.1: Django==5.1b1 + django-v5.1: Django==5.1rc1 django-latest: Django # Falcon @@ -505,8 +505,6 @@ deps = # OpenTelemetry Experimental (POTel) potel: -e .[opentelemetry-experimental] - potel: Flask<3 - potel: fastapi # pure_eval pure_eval: pure_eval @@ -648,6 +646,7 @@ setenv = OBJC_DISABLE_INITIALIZE_FORK_SAFETY=YES COVERAGE_FILE=.coverage-{envname} django: DJANGO_SETTINGS_MODULE=tests.integrations.django.myapp.settings + py3.12-django: PIP_CONSTRAINT=constraints.txt common: TESTPATH=tests gevent: TESTPATH=tests From 275189ecfa810176e658ff1aeaca00dc680605e6 Mon Sep 17 00:00:00 2001 From: Daniel Szoke <7881302+szokeasaurusrex@users.noreply.github.com> Date: Wed, 7 Aug 2024 14:42:56 +0200 Subject: [PATCH 008/244] feat(api): Remove `sentry_sdk.configure_scope` (#3406) Also, remove any tests for `sentry_sdk.configure_scope`. Since Strawberry's deprecated [Sentry tracing extensions](https://strawberry.rocks/docs/extensions/sentry-tracing) import `sentry_sdk.configure_scope`, importing `strawberry.extensions.tracing.SentryTracingExtension` (or `SentryTracingExtensionSync`) will result in an unhandled exception. 
Therefore, these imports, and any functionality associated with them, have also been removed. This itself is not a breaking change, as it is necessitated by the removal of `sentry_sdk.configure_scope`. BREAKING CHANGE: Remove `sentry_sdk.configure_scope`. Closes: #3402 --- docs/api.rst | 2 - sentry_sdk/__init__.py | 1 - sentry_sdk/api.py | 53 --------- sentry_sdk/integrations/strawberry.py | 17 --- .../strawberry/test_strawberry.py | 22 ---- .../test_new_scopes_compat.py | 29 ----- .../test_new_scopes_compat_event.py | 103 +----------------- tests/test_api.py | 7 -- tests/test_client.py | 52 +-------- 9 files changed, 2 insertions(+), 284 deletions(-) diff --git a/docs/api.rst b/docs/api.rst index 034652e05c..fe069c22d6 100644 --- a/docs/api.rst +++ b/docs/api.rst @@ -51,8 +51,6 @@ Client Management Managing Scope (advanced) ========================= -.. autofunction:: sentry_sdk.api.configure_scope .. autofunction:: sentry_sdk.api.push_scope .. autofunction:: sentry_sdk.api.new_scope - diff --git a/sentry_sdk/__init__.py b/sentry_sdk/__init__.py index 1c9cedec5f..4332c706c4 100644 --- a/sentry_sdk/__init__.py +++ b/sentry_sdk/__init__.py @@ -19,7 +19,6 @@ "capture_event", "capture_exception", "capture_message", - "configure_scope", "continue_trace", "flush", "get_baggage", diff --git a/sentry_sdk/api.py b/sentry_sdk/api.py index 9c11031fbb..98899feb22 100644 --- a/sentry_sdk/api.py +++ b/sentry_sdk/api.py @@ -1,6 +1,5 @@ import inspect import warnings -from contextlib import contextmanager from sentry_sdk import tracing_utils, Client from sentry_sdk._init_implementation import init @@ -16,7 +15,6 @@ from typing import Any from typing import Dict - from typing import Generator from typing import Optional from typing import overload from typing import Callable @@ -55,7 +53,6 @@ def overload(x): "capture_event", "capture_exception", "capture_message", - "configure_scope", "continue_trace", "flush", "get_baggage", @@ -194,56 +191,6 @@ def add_breadcrumb( return get_isolation_scope().add_breadcrumb(crumb, hint, **kwargs) -@overload -def configure_scope(): - # type: () -> ContextManager[Scope] - pass - - -@overload -def configure_scope( # noqa: F811 - callback, # type: Callable[[Scope], None] -): - # type: (...) -> None - pass - - -def configure_scope( # noqa: F811 - callback=None, # type: Optional[Callable[[Scope], None]] -): - # type: (...) -> Optional[ContextManager[Scope]] - """ - Reconfigures the scope. - - :param callback: If provided, call the callback with the current scope. - - :returns: If no callback is provided, returns a context manager that returns the scope. - """ - warnings.warn( - "sentry_sdk.configure_scope is deprecated and will be removed in the next major version. " - "Please consult our migration guide to learn how to migrate to the new API: " - "https://docs.sentry.io/platforms/python/migration/1.x-to-2.x#scope-configuring", - DeprecationWarning, - stacklevel=2, - ) - - scope = get_isolation_scope() - scope.generate_propagation_context() - - if callback is not None: - # TODO: used to return None when client is None. Check if this changes behavior. 
- callback(scope) - - return None - - @contextmanager - def inner(): - # type: () -> Generator[Scope, None, None] - yield scope - - return inner() - - @overload def push_scope(): # type: () -> ContextManager[Scope] diff --git a/sentry_sdk/integrations/strawberry.py b/sentry_sdk/integrations/strawberry.py index 148edac334..32dfd35876 100644 --- a/sentry_sdk/integrations/strawberry.py +++ b/sentry_sdk/integrations/strawberry.py @@ -31,10 +31,6 @@ from strawberry import Schema from strawberry.extensions import SchemaExtension # type: ignore from strawberry.extensions.tracing.utils import should_skip_tracing as strawberry_should_skip_tracing # type: ignore - from strawberry.extensions.tracing import ( # type: ignore - SentryTracingExtension as StrawberrySentryAsyncExtension, - SentryTracingExtensionSync as StrawberrySentrySyncExtension, - ) from strawberry.http import async_base_view, sync_base_view # type: ignore except ImportError: raise DidNotEnable("strawberry-graphql is not installed") @@ -104,14 +100,6 @@ def _sentry_patched_schema_init(self, *args, **kwargs): "False" if should_use_async_extension else "True", ) - # remove the built in strawberry sentry extension, if present - extensions = [ - extension - for extension in extensions - if extension - not in (StrawberrySentryAsyncExtension, StrawberrySentrySyncExtension) - ] - # add our extension extensions.append( SentryAsyncExtension if should_use_async_extension else SentrySyncExtension @@ -412,11 +400,6 @@ def inner(event, hint): def _guess_if_using_async(extensions): # type: (List[SchemaExtension]) -> bool - if StrawberrySentryAsyncExtension in extensions: - return True - elif StrawberrySentrySyncExtension in extensions: - return False - return bool( {"starlette", "starlite", "litestar", "fastapi"} & set(_get_installed_modules()) ) diff --git a/tests/integrations/strawberry/test_strawberry.py b/tests/integrations/strawberry/test_strawberry.py index dcc6632bdb..977aadc0cd 100644 --- a/tests/integrations/strawberry/test_strawberry.py +++ b/tests/integrations/strawberry/test_strawberry.py @@ -10,10 +10,6 @@ from fastapi import FastAPI from fastapi.testclient import TestClient from flask import Flask -from strawberry.extensions.tracing import ( - SentryTracingExtension, - SentryTracingExtensionSync, -) from strawberry.fastapi import GraphQLRouter from strawberry.flask.views import GraphQLView @@ -143,24 +139,6 @@ def test_infer_execution_type_from_installed_packages_sync(sentry_init): assert SentrySyncExtension in schema.extensions -def test_replace_existing_sentry_async_extension(sentry_init): - sentry_init(integrations=[StrawberryIntegration()]) - - schema = strawberry.Schema(Query, extensions=[SentryTracingExtension]) - assert SentryTracingExtension not in schema.extensions - assert SentrySyncExtension not in schema.extensions - assert SentryAsyncExtension in schema.extensions - - -def test_replace_existing_sentry_sync_extension(sentry_init): - sentry_init(integrations=[StrawberryIntegration()]) - - schema = strawberry.Schema(Query, extensions=[SentryTracingExtensionSync]) - assert SentryTracingExtensionSync not in schema.extensions - assert SentryAsyncExtension not in schema.extensions - assert SentrySyncExtension in schema.extensions - - @parameterize_strawberry_test def test_capture_request_if_available_and_send_pii_is_on( request, diff --git a/tests/new_scopes_compat/test_new_scopes_compat.py b/tests/new_scopes_compat/test_new_scopes_compat.py index 21e2ac27d3..3c7d2fced2 100644 --- 
a/tests/new_scopes_compat/test_new_scopes_compat.py +++ b/tests/new_scopes_compat/test_new_scopes_compat.py @@ -11,35 +11,6 @@ """ -def test_configure_scope_sdk1(sentry_init, capture_events): - """ - Mutate data in a `with configure_scope` block. - - Checks the results of SDK 2.x against the results the same code returned in SDK 1.x. - """ - sentry_init() - - events = capture_events() - - sentry_sdk.set_tag("A", 1) - sentry_sdk.capture_message("Event A") - - with sentry_sdk.configure_scope() as scope: # configure scope - sentry_sdk.set_tag("B1", 1) - scope.set_tag("B2", 1) - sentry_sdk.capture_message("Event B") - - sentry_sdk.set_tag("Z", 1) - sentry_sdk.capture_message("Event Z") - - (event_a, event_b, event_z) = events - - # Check against the results the same code returned in SDK 1.x - assert event_a["tags"] == {"A": 1} - assert event_b["tags"] == {"A": 1, "B1": 1, "B2": 1} - assert event_z["tags"] == {"A": 1, "B1": 1, "B2": 1, "Z": 1} - - def test_push_scope_sdk1(sentry_init, capture_events): """ Mutate data in a `with push_scope` block diff --git a/tests/new_scopes_compat/test_new_scopes_compat_event.py b/tests/new_scopes_compat/test_new_scopes_compat_event.py index db1e5fec4b..af4f0c6efb 100644 --- a/tests/new_scopes_compat/test_new_scopes_compat_event.py +++ b/tests/new_scopes_compat/test_new_scopes_compat_event.py @@ -335,71 +335,6 @@ def test_event(sentry_init, capture_envelopes, expected_error, expected_transact envelopes = capture_envelopes() - with sentry_sdk.start_transaction( - name="test_transaction", op="test_transaction_op" - ) as trx: - with sentry_sdk.start_span(op="test_span") as span: - with sentry_sdk.configure_scope() as scope: # configure scope - _generate_event_data(scope) - _faulty_function() - - (error_envelope, transaction_envelope) = envelopes - - error = error_envelope.get_event() - transaction = transaction_envelope.get_transaction_event() - attachment = error_envelope.items[-1] - - assert error == expected_error(trx, span) - assert transaction == expected_transaction(trx, span) - assert attachment.headers == { - "filename": "hello.txt", - "type": "attachment", - "content_type": "text/plain", - } - assert attachment.payload.bytes == b"Hello World" - - -def test_event2(sentry_init, capture_envelopes, expected_error, expected_transaction): - _init_sentry_sdk(sentry_init) - - envelopes = capture_envelopes() - - with Hub(Hub.current): - sentry_sdk.set_tag("A", 1) # will not be added - - with Hub.current: # with hub - with sentry_sdk.push_scope() as scope: - scope.set_tag("B", 1) # will not be added - - with sentry_sdk.start_transaction( - name="test_transaction", op="test_transaction_op" - ) as trx: - with sentry_sdk.start_span(op="test_span") as span: - with sentry_sdk.configure_scope() as scope: # configure scope - _generate_event_data(scope) - _faulty_function() - - (error_envelope, transaction_envelope) = envelopes - - error = error_envelope.get_event() - transaction = transaction_envelope.get_transaction_event() - attachment = error_envelope.items[-1] - - assert error == expected_error(trx, span) - assert transaction == expected_transaction(trx, span) - assert attachment.headers == { - "filename": "hello.txt", - "type": "attachment", - "content_type": "text/plain", - } - assert attachment.payload.bytes == b"Hello World" - - -def test_event3(sentry_init, capture_envelopes, expected_error, expected_transaction): - _init_sentry_sdk(sentry_init) - - envelopes = capture_envelopes() - with Hub(Hub.current): sentry_sdk.set_tag("A", 1) # will not be added @@ -431,43 
+366,7 @@ def test_event3(sentry_init, capture_envelopes, expected_error, expected_transac assert attachment.payload.bytes == b"Hello World" -def test_event4(sentry_init, capture_envelopes, expected_error, expected_transaction): - _init_sentry_sdk(sentry_init) - - envelopes = capture_envelopes() - - with Hub(Hub.current): - sentry_sdk.set_tag("A", 1) # will not be added - - with Hub(Hub.current): # with hub clone - with sentry_sdk.push_scope() as scope: - scope.set_tag("B", 1) # will not be added - - with sentry_sdk.start_transaction( - name="test_transaction", op="test_transaction_op" - ) as trx: - with sentry_sdk.start_span(op="test_span") as span: - with sentry_sdk.configure_scope() as scope: # configure scope - _generate_event_data(scope) - _faulty_function() - - (error_envelope, transaction_envelope) = envelopes - - error = error_envelope.get_event() - transaction = transaction_envelope.get_transaction_event() - attachment = error_envelope.items[-1] - - assert error == expected_error(trx, span) - assert transaction == expected_transaction(trx, span) - assert attachment.headers == { - "filename": "hello.txt", - "type": "attachment", - "content_type": "text/plain", - } - assert attachment.payload.bytes == b"Hello World" - - -def test_event5(sentry_init, capture_envelopes, expected_error, expected_transaction): +def test_event2(sentry_init, capture_envelopes, expected_error, expected_transaction): _init_sentry_sdk(sentry_init) envelopes = capture_envelopes() diff --git a/tests/test_api.py b/tests/test_api.py index ae194af7fd..35c0576027 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -11,7 +11,6 @@ is_initialized, start_transaction, set_tags, - configure_scope, push_scope, get_global_scope, get_current_scope, @@ -185,12 +184,6 @@ def test_set_tags(sentry_init, capture_events): }, "Updating tags with empty dict changed tags" -def test_configure_scope_deprecation(): - with pytest.warns(DeprecationWarning): - with configure_scope(): - ... - - def test_push_scope_deprecation(): with pytest.warns(DeprecationWarning): with push_scope(): diff --git a/tests/test_client.py b/tests/test_client.py index f6c2cec05c..7393991af7 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -12,10 +12,8 @@ import sentry_sdk from sentry_sdk import ( - Hub, Client, add_breadcrumb, - configure_scope, capture_message, capture_exception, capture_event, @@ -557,39 +555,12 @@ def capture_envelope(self, envelope): ) start = time.time() - output = subprocess.check_output([sys.executable, str(app)]) + subprocess.check_output([sys.executable, str(app)]) end = time.time() # Each message takes at least 0.1 seconds to process assert int(end - start) >= num_messages / 10 - assert output.count(b"HI") == num_messages - - -def test_configure_scope_available( - sentry_init, request, monkeypatch, suppress_deprecation_warnings -): - """ - Test that scope is configured if client is configured - - This test can be removed once configure_scope and the Hub are removed. 
- """ - sentry_init() - - with configure_scope() as scope: - assert scope is Hub.current.scope - scope.set_tag("foo", "bar") - - calls = [] - - def callback(scope): - calls.append(scope) - scope.set_tag("foo", "bar") - - assert configure_scope(callback) is None - assert len(calls) == 1 - assert calls[0] is Hub.current.scope - @pytest.mark.tests_internal_exceptions def test_client_debug_option_enabled(sentry_init, caplog): @@ -609,27 +580,6 @@ def test_client_debug_option_disabled(with_client, sentry_init, caplog): assert "OK" not in caplog.text -@pytest.mark.skip( - reason="New behavior in SDK 2.0: You have a scope before init and add data to it." -) -def test_scope_initialized_before_client(sentry_init, capture_events): - """ - This is a consequence of how configure_scope() works. We must - make `configure_scope()` a noop if no client is configured. Even - if the user later configures a client: We don't know that. - """ - with configure_scope() as scope: - scope.set_tag("foo", 42) - - sentry_init() - - events = capture_events() - capture_message("hi") - (event,) = events - - assert "tags" not in event - - def test_weird_chars(sentry_init, capture_events): sentry_init() events = capture_events() From 3936502957258061d2c16358a731986f58d20ec6 Mon Sep 17 00:00:00 2001 From: Daniel Szoke <7881302+szokeasaurusrex@users.noreply.github.com> Date: Wed, 7 Aug 2024 17:08:44 +0200 Subject: [PATCH 009/244] feat(api): Remove `sentry_sdk.push_scope` (#3408) Also, remove any tests that test `sentry_sdk.push_scope`. BREAKING CHANGE: Remove `sentry_sdk.push_scope`. Closes #3403 --- docs/api.rst | 2 - sentry_sdk/__init__.py | 1 - sentry_sdk/api.py | 56 +-- .../test_new_scopes_compat.py | 29 -- .../test_new_scopes_compat_event.py | 402 ------------------ tests/test_api.py | 7 - tests/test_basics.py | 137 ------ 7 files changed, 1 insertion(+), 633 deletions(-) delete mode 100644 tests/new_scopes_compat/test_new_scopes_compat_event.py diff --git a/docs/api.rst b/docs/api.rst index fe069c22d6..73821d720d 100644 --- a/docs/api.rst +++ b/docs/api.rst @@ -51,6 +51,4 @@ Client Management Managing Scope (advanced) ========================= -.. autofunction:: sentry_sdk.api.push_scope - .. 
autofunction:: sentry_sdk.api.new_scope diff --git a/sentry_sdk/__init__.py b/sentry_sdk/__init__.py index 4332c706c4..8ff8dc703e 100644 --- a/sentry_sdk/__init__.py +++ b/sentry_sdk/__init__.py @@ -32,7 +32,6 @@ "isolation_scope", "last_event_id", "new_scope", - "push_scope", "set_context", "set_extra", "set_level", diff --git a/sentry_sdk/api.py b/sentry_sdk/api.py index 98899feb22..49a3bf7ecf 100644 --- a/sentry_sdk/api.py +++ b/sentry_sdk/api.py @@ -1,9 +1,8 @@ import inspect -import warnings from sentry_sdk import tracing_utils, Client from sentry_sdk._init_implementation import init -from sentry_sdk.scope import Scope, _ScopeManager, new_scope, isolation_scope +from sentry_sdk.scope import Scope, new_scope, isolation_scope from sentry_sdk.tracing import NoOpSpan, Transaction, trace from sentry_sdk.crons import monitor @@ -16,10 +15,8 @@ from typing import Any from typing import Dict from typing import Optional - from typing import overload from typing import Callable from typing import TypeVar - from typing import ContextManager from typing import Union from typing_extensions import Unpack @@ -39,11 +36,6 @@ T = TypeVar("T") F = TypeVar("F", bound=Callable[..., Any]) -else: - - def overload(x): - # type: (T) -> T - return x # When changing this, update __all__ in __init__.py too @@ -66,7 +58,6 @@ def overload(x): "isolation_scope", "last_event_id", "new_scope", - "push_scope", "set_context", "set_extra", "set_level", @@ -191,51 +182,6 @@ def add_breadcrumb( return get_isolation_scope().add_breadcrumb(crumb, hint, **kwargs) -@overload -def push_scope(): - # type: () -> ContextManager[Scope] - pass - - -@overload -def push_scope( # noqa: F811 - callback, # type: Callable[[Scope], None] -): - # type: (...) -> None - pass - - -def push_scope( # noqa: F811 - callback=None, # type: Optional[Callable[[Scope], None]] -): - # type: (...) -> Optional[ContextManager[Scope]] - """ - Pushes a new layer on the scope stack. - - :param callback: If provided, this method pushes a scope, calls - `callback`, and pops the scope again. - - :returns: If no `callback` is provided, a context manager that should - be used to pop the scope again. - """ - warnings.warn( - "sentry_sdk.push_scope is deprecated and will be removed in the next major version. " - "Please consult our migration guide to learn how to migrate to the new API: " - "https://docs.sentry.io/platforms/python/migration/1.x-to-2.x#scope-pushing", - DeprecationWarning, - stacklevel=2, - ) - - if callback is not None: - with warnings.catch_warnings(): - warnings.simplefilter("ignore", DeprecationWarning) - with push_scope() as scope: - callback(scope) - return None - - return _ScopeManager() - - @scopemethod def set_tag(key, value): # type: (str, Any) -> None diff --git a/tests/new_scopes_compat/test_new_scopes_compat.py b/tests/new_scopes_compat/test_new_scopes_compat.py index 3c7d2fced2..1e109ec036 100644 --- a/tests/new_scopes_compat/test_new_scopes_compat.py +++ b/tests/new_scopes_compat/test_new_scopes_compat.py @@ -11,35 +11,6 @@ """ -def test_push_scope_sdk1(sentry_init, capture_events): - """ - Mutate data in a `with push_scope` block - - Checks the results of SDK 2.x against the results the same code returned in SDK 1.x. 
- """ - sentry_init() - - events = capture_events() - - sentry_sdk.set_tag("A", 1) - sentry_sdk.capture_message("Event A") - - with sentry_sdk.push_scope() as scope: # push scope - sentry_sdk.set_tag("B1", 1) - scope.set_tag("B2", 1) - sentry_sdk.capture_message("Event B") - - sentry_sdk.set_tag("Z", 1) - sentry_sdk.capture_message("Event Z") - - (event_a, event_b, event_z) = events - - # Check against the results the same code returned in SDK 1.x - assert event_a["tags"] == {"A": 1} - assert event_b["tags"] == {"A": 1, "B1": 1, "B2": 1} - assert event_z["tags"] == {"A": 1, "Z": 1} - - def test_with_hub_sdk1(sentry_init, capture_events): """ Mutate data in a `with Hub:` block diff --git a/tests/new_scopes_compat/test_new_scopes_compat_event.py b/tests/new_scopes_compat/test_new_scopes_compat_event.py deleted file mode 100644 index af4f0c6efb..0000000000 --- a/tests/new_scopes_compat/test_new_scopes_compat_event.py +++ /dev/null @@ -1,402 +0,0 @@ -import pytest - -from unittest import mock - -import sentry_sdk -from sentry_sdk.hub import Hub -from sentry_sdk.integrations import iter_default_integrations -from sentry_sdk.scrubber import EventScrubber, DEFAULT_DENYLIST - - -""" -Those tests are meant to check the compatibility of the new scopes in SDK 2.0 with the old Hub/Scope system in SDK 1.x. - -Those tests have been run with the latest SDK 1.x version and the data used in the `assert` statements represents -the behvaior of the SDK 1.x. - -This makes sure that we are backwards compatible. (on a best effort basis, there will probably be some edge cases that are not covered here) -""" - - -@pytest.fixture -def integrations(): - return [ - integration.identifier - for integration in iter_default_integrations( - with_auto_enabling_integrations=False - ) - ] - - -@pytest.fixture -def expected_error(integrations): - def create_expected_error_event(trx, span): - return { - "level": "warning-X", - "exception": { - "values": [ - { - "mechanism": {"type": "generic", "handled": True}, - "module": None, - "type": "ValueError", - "value": "This is a test exception", - "stacktrace": { - "frames": [ - { - "filename": "tests/new_scopes_compat/test_new_scopes_compat_event.py", - "abs_path": mock.ANY, - "function": "_faulty_function", - "module": "tests.new_scopes_compat.test_new_scopes_compat_event", - "lineno": mock.ANY, - "pre_context": [ - " return create_expected_transaction_event", - "", - "", - "def _faulty_function():", - " try:", - ], - "context_line": ' raise ValueError("This is a test exception")', - "post_context": [ - " except ValueError as ex:", - " sentry_sdk.capture_exception(ex)", - "", - "", - "def _test_before_send(event, hint):", - ], - "vars": { - "ex": mock.ANY, - }, - "in_app": True, - } - ] - }, - } - ] - }, - "event_id": mock.ANY, - "timestamp": mock.ANY, - "contexts": { - "character": { - "name": "Mighty Fighter changed by before_send", - "age": 19, - "attack_type": "melee", - }, - "trace": { - "trace_id": trx.trace_id, - "span_id": span.span_id, - "parent_span_id": span.parent_span_id, - "op": "test_span", - "origin": "manual", - "description": None, - "data": { - "thread.id": mock.ANY, - "thread.name": "MainThread", - }, - }, - "runtime": { - "name": "CPython", - "version": mock.ANY, - "build": mock.ANY, - }, - }, - "user": { - "id": "123", - "email": "jane.doe@example.com", - "ip_address": "[Filtered]", - }, - "transaction": "test_transaction", - "transaction_info": {"source": "custom"}, - "tags": {"tag1": "tag1_value", "tag2": "tag2_value"}, - "extra": { - "extra1": 
"extra1_value", - "extra2": "extra2_value", - "should_be_removed_by_event_scrubber": "[Filtered]", - "sys.argv": "[Filtered]", - }, - "breadcrumbs": { - "values": [ - { - "category": "error-level", - "message": "Authenticated user %s", - "level": "error", - "data": {"breadcrumb2": "somedata"}, - "timestamp": mock.ANY, - "type": "default", - } - ] - }, - "modules": mock.ANY, - "release": "0.1.2rc3", - "environment": "checking-compatibility-with-sdk1", - "server_name": mock.ANY, - "sdk": { - "name": "sentry.python", - "version": mock.ANY, - "packages": [{"name": "pypi:sentry-sdk", "version": mock.ANY}], - "integrations": integrations, - }, - "platform": "python", - "_meta": { - "user": {"ip_address": {"": {"rem": [["!config", "s"]]}}}, - "extra": { - "should_be_removed_by_event_scrubber": { - "": {"rem": [["!config", "s"]]} - }, - "sys.argv": {"": {"rem": [["!config", "s"]]}}, - }, - }, - } - - return create_expected_error_event - - -@pytest.fixture -def expected_transaction(integrations): - def create_expected_transaction_event(trx, span): - return { - "type": "transaction", - "transaction": "test_transaction changed by before_send_transaction", - "transaction_info": {"source": "custom"}, - "contexts": { - "trace": { - "trace_id": trx.trace_id, - "span_id": trx.span_id, - "parent_span_id": None, - "op": "test_transaction_op", - "origin": "manual", - "description": None, - "data": { - "thread.id": mock.ANY, - "thread.name": "MainThread", - }, - }, - "character": { - "name": "Mighty Fighter changed by before_send_transaction", - "age": 19, - "attack_type": "melee", - }, - "runtime": { - "name": "CPython", - "version": mock.ANY, - "build": mock.ANY, - }, - }, - "tags": {"tag1": "tag1_value", "tag2": "tag2_value"}, - "timestamp": mock.ANY, - "start_timestamp": mock.ANY, - "spans": [ - { - "data": { - "thread.id": mock.ANY, - "thread.name": "MainThread", - }, - "trace_id": trx.trace_id, - "span_id": span.span_id, - "parent_span_id": span.parent_span_id, - "same_process_as_parent": True, - "op": "test_span", - "origin": "manual", - "description": None, - "start_timestamp": mock.ANY, - "timestamp": mock.ANY, - } - ], - "measurements": {"memory_used": {"value": 456, "unit": "byte"}}, - "event_id": mock.ANY, - "level": "warning-X", - "user": { - "id": "123", - "email": "jane.doe@example.com", - "ip_address": "[Filtered]", - }, - "extra": { - "extra1": "extra1_value", - "extra2": "extra2_value", - "should_be_removed_by_event_scrubber": "[Filtered]", - "sys.argv": "[Filtered]", - }, - "release": "0.1.2rc3", - "environment": "checking-compatibility-with-sdk1", - "server_name": mock.ANY, - "sdk": { - "name": "sentry.python", - "version": mock.ANY, - "packages": [{"name": "pypi:sentry-sdk", "version": mock.ANY}], - "integrations": integrations, - }, - "platform": "python", - "_meta": { - "user": {"ip_address": {"": {"rem": [["!config", "s"]]}}}, - "extra": { - "should_be_removed_by_event_scrubber": { - "": {"rem": [["!config", "s"]]} - }, - "sys.argv": {"": {"rem": [["!config", "s"]]}}, - }, - }, - } - - return create_expected_transaction_event - - -def _faulty_function(): - try: - raise ValueError("This is a test exception") - except ValueError as ex: - sentry_sdk.capture_exception(ex) - - -def _test_before_send(event, hint): - event["contexts"]["character"]["name"] += " changed by before_send" - return event - - -def _test_before_send_transaction(event, hint): - event["transaction"] += " changed by before_send_transaction" - event["contexts"]["character"]["name"] += " changed by 
before_send_transaction" - return event - - -def _test_before_breadcrumb(breadcrumb, hint): - if breadcrumb["category"] == "info-level": - return None - return breadcrumb - - -def _generate_event_data(scope=None): - """ - Generates some data to be used in the events sent by the tests. - """ - sentry_sdk.set_level("warning-X") - - sentry_sdk.add_breadcrumb( - category="info-level", - message="Authenticated user %s", - level="info", - data={"breadcrumb1": "somedata"}, - ) - sentry_sdk.add_breadcrumb( - category="error-level", - message="Authenticated user %s", - level="error", - data={"breadcrumb2": "somedata"}, - ) - - sentry_sdk.set_context( - "character", - { - "name": "Mighty Fighter", - "age": 19, - "attack_type": "melee", - }, - ) - - sentry_sdk.set_extra("extra1", "extra1_value") - sentry_sdk.set_extra("extra2", "extra2_value") - sentry_sdk.set_extra("should_be_removed_by_event_scrubber", "XXX") - - sentry_sdk.set_tag("tag1", "tag1_value") - sentry_sdk.set_tag("tag2", "tag2_value") - - sentry_sdk.set_user( - {"id": "123", "email": "jane.doe@example.com", "ip_address": "211.161.1.124"} - ) - - sentry_sdk.set_measurement("memory_used", 456, "byte") - - if scope is not None: - scope.add_attachment(bytes=b"Hello World", filename="hello.txt") - - -def _init_sentry_sdk(sentry_init): - sentry_init( - environment="checking-compatibility-with-sdk1", - release="0.1.2rc3", - before_send=_test_before_send, - before_send_transaction=_test_before_send_transaction, - before_breadcrumb=_test_before_breadcrumb, - event_scrubber=EventScrubber( - denylist=DEFAULT_DENYLIST - + ["should_be_removed_by_event_scrubber", "sys.argv"] - ), - send_default_pii=False, - traces_sample_rate=1.0, - auto_enabling_integrations=False, - ) - - -# -# The actual Tests start here! -# - - -def test_event(sentry_init, capture_envelopes, expected_error, expected_transaction): - _init_sentry_sdk(sentry_init) - - envelopes = capture_envelopes() - - with Hub(Hub.current): - sentry_sdk.set_tag("A", 1) # will not be added - - with Hub.current: # with hub - with sentry_sdk.push_scope() as scope: - scope.set_tag("B", 1) # will not be added - - with sentry_sdk.push_scope() as scope: # push scope - with sentry_sdk.start_transaction( - name="test_transaction", op="test_transaction_op" - ) as trx: - with sentry_sdk.start_span(op="test_span") as span: - _generate_event_data(scope) - _faulty_function() - - (error_envelope, transaction_envelope) = envelopes - - error = error_envelope.get_event() - transaction = transaction_envelope.get_transaction_event() - attachment = error_envelope.items[-1] - - assert error == expected_error(trx, span) - assert transaction == expected_transaction(trx, span) - assert attachment.headers == { - "filename": "hello.txt", - "type": "attachment", - "content_type": "text/plain", - } - assert attachment.payload.bytes == b"Hello World" - - -def test_event2(sentry_init, capture_envelopes, expected_error, expected_transaction): - _init_sentry_sdk(sentry_init) - - envelopes = capture_envelopes() - - with Hub(Hub.current): - sentry_sdk.set_tag("A", 1) # will not be added - - with Hub(Hub.current): # with hub clone - with sentry_sdk.push_scope() as scope: - scope.set_tag("B", 1) # will not be added - - with sentry_sdk.push_scope() as scope: # push scope - with sentry_sdk.start_transaction( - name="test_transaction", op="test_transaction_op" - ) as trx: - with sentry_sdk.start_span(op="test_span") as span: - _generate_event_data(scope) - _faulty_function() - - (error_envelope, transaction_envelope) = envelopes - - 
error = error_envelope.get_event() - transaction = transaction_envelope.get_transaction_event() - attachment = error_envelope.items[-1] - - assert error == expected_error(trx, span) - assert transaction == expected_transaction(trx, span) - assert attachment.headers == { - "filename": "hello.txt", - "type": "attachment", - "content_type": "text/plain", - } - assert attachment.payload.bytes == b"Hello World" diff --git a/tests/test_api.py b/tests/test_api.py index 35c0576027..ffe1be756d 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -11,7 +11,6 @@ is_initialized, start_transaction, set_tags, - push_scope, get_global_scope, get_current_scope, get_isolation_scope, @@ -182,9 +181,3 @@ def test_set_tags(sentry_init, capture_events): "tag2": "updated", "tag3": "new", }, "Updating tags with empty dict changed tags" - - -def test_push_scope_deprecation(): - with pytest.warns(DeprecationWarning): - with push_scope(): - ... diff --git a/tests/test_basics.py b/tests/test_basics.py index cc4594d8ab..ca537ab380 100644 --- a/tests/test_basics.py +++ b/tests/test_basics.py @@ -7,14 +7,12 @@ from collections import Counter import pytest -from sentry_sdk.client import Client from tests.conftest import patch_start_tracing_child import sentry_sdk import sentry_sdk.scope from sentry_sdk import ( get_client, - push_scope, capture_event, capture_exception, capture_message, @@ -294,76 +292,6 @@ def before_breadcrumb(crumb, hint): add_breadcrumb(crumb=dict(foo=42)) -def test_push_scope(sentry_init, capture_events, suppress_deprecation_warnings): - sentry_init() - events = capture_events() - - with push_scope() as scope: - scope.level = "warning" - try: - 1 / 0 - except Exception as e: - capture_exception(e) - - (event,) = events - - assert event["level"] == "warning" - assert "exception" in event - - -def test_push_scope_null_client( - sentry_init, capture_events, suppress_deprecation_warnings -): - """ - This test can be removed when we remove push_scope and the Hub from the SDK. - """ - sentry_init() - events = capture_events() - - Hub.current.bind_client(None) - - with push_scope() as scope: - scope.level = "warning" - try: - 1 / 0 - except Exception as e: - capture_exception(e) - - assert len(events) == 0 - - -@pytest.mark.skip( - reason="This test is not valid anymore, because push_scope just returns the isolation scope. This test should be removed once the Hub is removed" -) -@pytest.mark.parametrize("null_client", (True, False)) -def test_push_scope_callback(sentry_init, null_client, capture_events): - """ - This test can be removed when we remove push_scope and the Hub from the SDK. - """ - sentry_init() - - if null_client: - Hub.current.bind_client(None) - - outer_scope = Hub.current.scope - - calls = [] - - @push_scope - def _(scope): - assert scope is Hub.current.scope - assert scope is not outer_scope - calls.append(1) - - # push_scope always needs to execute the callback regardless of - # client state, because that actually runs usercode in it, not - # just scope config code - assert calls == [1] - - # Assert scope gets popped correctly - assert Hub.current.scope is outer_scope - - def test_breadcrumbs(sentry_init, capture_events): sentry_init(max_breadcrumbs=10) events = capture_events() @@ -535,71 +463,6 @@ def test_integrations( } == expected_integrations -@pytest.mark.skip( - reason="This test is not valid anymore, because with the new Scopes calling bind_client on the Hub sets the client on the global scope. 
This test should be removed once the Hub is removed" -) -def test_client_initialized_within_scope(sentry_init, caplog): - """ - This test can be removed when we remove push_scope and the Hub from the SDK. - """ - caplog.set_level(logging.WARNING) - - sentry_init() - - with push_scope(): - Hub.current.bind_client(Client()) - - (record,) = (x for x in caplog.records if x.levelname == "WARNING") - - assert record.msg.startswith("init() called inside of pushed scope.") - - -@pytest.mark.skip( - reason="This test is not valid anymore, because with the new Scopes the push_scope just returns the isolation scope. This test should be removed once the Hub is removed" -) -def test_scope_leaks_cleaned_up(sentry_init, caplog): - """ - This test can be removed when we remove push_scope and the Hub from the SDK. - """ - caplog.set_level(logging.WARNING) - - sentry_init() - - old_stack = list(Hub.current._stack) - - with push_scope(): - push_scope() - - assert Hub.current._stack == old_stack - - (record,) = (x for x in caplog.records if x.levelname == "WARNING") - - assert record.message.startswith("Leaked 1 scopes:") - - -@pytest.mark.skip( - reason="This test is not valid anymore, because with the new Scopes there is not pushing and popping of scopes. This test should be removed once the Hub is removed" -) -def test_scope_popped_too_soon(sentry_init, caplog): - """ - This test can be removed when we remove push_scope and the Hub from the SDK. - """ - caplog.set_level(logging.ERROR) - - sentry_init() - - old_stack = list(Hub.current._stack) - - with push_scope(): - Hub.current.pop_scope_unsafe() - - assert Hub.current._stack == old_stack - - (record,) = (x for x in caplog.records if x.levelname == "ERROR") - - assert record.message == ("Scope popped too soon. Popped 1 scopes too many.") - - def test_scope_event_processor_order(sentry_init, capture_events): def before_send(event, hint): event["message"] += "baz" From 062a155f35dab881c9144f9c1b7b7323ca726c69 Mon Sep 17 00:00:00 2001 From: Daniel Szoke <7881302+szokeasaurusrex@users.noreply.github.com> Date: Fri, 9 Aug 2024 12:06:42 +0200 Subject: [PATCH 010/244] feat(transport): Remove `HttpTransport` `hub_cls` attribute (#3412) This change is a prerequisite for #3404. BREAKING CHANGE: Remove `sentry_sdk.transport.HttpTransport`'s `hub_cls` attribute. 
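
For illustration, a minimal sketch of the usage pattern this removes, adapted
from the `test_hub_cls_backwards_compat` test deleted below (the DSN and the
constructor call are taken from that test; the exact post-removal failure mode,
an `AttributeError` on the first read, is an assumption rather than something
the test suite asserts):

    from collections import defaultdict

    from sentry_sdk.transport import HttpTransport

    transport = HttpTransport(
        defaultdict(lambda: None, {"dsn": "https://123abc@example.com/123"})
    )

    # Previously: emitted a DeprecationWarning and returned sentry_sdk.Hub.
    # After this change the property no longer exists, so this read is
    # expected to raise AttributeError; no replacement is introduced here.
    transport.hub_cls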
--- sentry_sdk/transport.py | 28 ---------------------------- tests/test_transport.py | 20 -------------------- 2 files changed, 48 deletions(-) diff --git a/sentry_sdk/transport.py b/sentry_sdk/transport.py index e5c39c48e4..1f83b5d01f 100644 --- a/sentry_sdk/transport.py +++ b/sentry_sdk/transport.py @@ -12,7 +12,6 @@ import urllib3 import certifi -import sentry_sdk from sentry_sdk.consts import EndpointType from sentry_sdk.utils import Dsn, logger, capture_internal_exceptions from sentry_sdk.worker import BackgroundWorker @@ -231,9 +230,6 @@ def __init__( proxy_headers=options["proxy_headers"], ) - # Backwards compatibility for deprecated `self.hub_class` attribute - self._hub_cls = sentry_sdk.Hub - def record_lost_event( self, reason, # type: str @@ -604,30 +600,6 @@ def kill(self): logger.debug("Killing HTTP transport") self._worker.kill() - @staticmethod - def _warn_hub_cls(): - # type: () -> None - """Convenience method to warn users about the deprecation of the `hub_cls` attribute.""" - warnings.warn( - "The `hub_cls` attribute is deprecated and will be removed in a future release.", - DeprecationWarning, - stacklevel=3, - ) - - @property - def hub_cls(self): - # type: () -> type[sentry_sdk.Hub] - """DEPRECATED: This attribute is deprecated and will be removed in a future release.""" - HttpTransport._warn_hub_cls() - return self._hub_cls - - @hub_cls.setter - def hub_cls(self, value): - # type: (type[sentry_sdk.Hub]) -> None - """DEPRECATED: This attribute is deprecated and will be removed in a future release.""" - HttpTransport._warn_hub_cls() - self._hub_cls = value - class _FunctionTransport(Transport): """ diff --git a/tests/test_transport.py b/tests/test_transport.py index 2e2ad3c4cd..d4522de942 100644 --- a/tests/test_transport.py +++ b/tests/test_transport.py @@ -18,13 +18,11 @@ capture_message, isolation_scope, get_isolation_scope, - Hub, ) from sentry_sdk.envelope import Envelope, Item, parse_json from sentry_sdk.transport import ( KEEP_ALIVE_SOCKET_OPTIONS, _parse_rate_limits, - HttpTransport, ) from sentry_sdk.integrations.logging import LoggingIntegration, ignore_logger @@ -649,24 +647,6 @@ def test_metric_bucket_limits_with_all_namespaces( ] -def test_hub_cls_backwards_compat(): - class TestCustomHubClass(Hub): - pass - - transport = HttpTransport( - defaultdict(lambda: None, {"dsn": "https://123abc@example.com/123"}) - ) - - with pytest.deprecated_call(): - assert transport.hub_cls is Hub - - with pytest.deprecated_call(): - transport.hub_cls = TestCustomHubClass - - with pytest.deprecated_call(): - assert transport.hub_cls is TestCustomHubClass - - @pytest.mark.parametrize("quantity", (1, 2, 10)) def test_record_lost_event_quantity(capturing_server, make_client, quantity): client = make_client() From da00efcb242773ee9b908a305ef600ef1147f6dd Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Mon, 12 Aug 2024 16:19:36 +0200 Subject: [PATCH 011/244] Start a migration guide (#3440) --- MIGRATION_GUIDE.md | 26 +++++++++++++++++++++----- 1 file changed, 21 insertions(+), 5 deletions(-) diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index 53396a37ba..df3ee6ea7d 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -1,13 +1,29 @@ -# Sentry SDK 2.0 Migration Guide +# Sentry SDK Migration Guide + + +## Upgrading to 3.0 + +Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of what's changed. Looking for a more digestable summary? 
See the [guide in the docs](https://docs.sentry.io/platforms/python/migration/2.x-to-3.x) with the most common migration patterns. + +### New Features + +### Changed + +### Removed + +### Deprecated + + +## Upgrading to 2.0 Looking to upgrade from Sentry SDK 1.x to 2.x? Here's a comprehensive list of what's changed. Looking for a more digestable summary? See the [guide in the docs](https://docs.sentry.io/platforms/python/migration/1.x-to-2.x) with the most common migration patterns. -## New Features +### New Features - Additional integrations will now be activated automatically if the SDK detects the respective package is installed: Ariadne, ARQ, asyncpg, Chalice, clickhouse-driver, GQL, Graphene, huey, Loguru, PyMongo, Quart, Starlite, Strawberry. - While refactoring the [inner workings](https://docs.sentry.io/platforms/python/enriching-events/scopes/) of the SDK we added new top-level APIs for custom instrumentation called `new_scope` and `isolation_scope`. See the [Deprecated](#deprecated) section to see how they map to the existing APIs. -## Changed +### Changed - The Pyramid integration will not capture errors that might happen in `authenticated_userid()` in a custom `AuthenticationPolicy` class. - The method `need_code_loation` of the `MetricsAggregator` was renamed to `need_code_location`. @@ -59,7 +75,7 @@ Looking to upgrade from Sentry SDK 1.x to 2.x? Here's a comprehensive list of wh -## Removed +### Removed - Removed support for Python 2 and Python 3.5. The SDK now requires at least Python 3.6. - Removed support for Celery 3.\*. @@ -82,7 +98,7 @@ Looking to upgrade from Sentry SDK 1.x to 2.x? Here's a comprehensive list of wh - Removed the experimental `metrics_summary_sample_rate` config option. - Removed the experimental `should_summarize_metric` config option. -## Deprecated +### Deprecated - Using the `Hub` directly as well as using hub-based APIs has been deprecated. Where available, use [the top-level API instead](sentry_sdk/api.py); otherwise use the [scope API](sentry_sdk/scope.py) or the [client API](sentry_sdk/client.py). 
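
A minimal before/after sketch of the hub-to-top-level-API move described in the
bullet above (illustrative only: it mirrors the substitutions applied across the
test suite in this series, the tag names and message are made up, and it assumes
`sentry_sdk.init()` has already been called elsewhere):

    import sentry_sdk

    # Deprecated hub-based style:
    #     from sentry_sdk import Hub
    #     Hub.current.scope.set_tag("my_tag", "my_value")
    #     Hub.current.capture_message("hi")

    # Top-level API replacement:
    sentry_sdk.set_tag("my_tag", "my_value")
    sentry_sdk.capture_message("hi")

    # Or, via the scope API, when a specific scope is needed:
    sentry_sdk.get_current_scope().set_tag("my_other_tag", "my_value")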
From a576bdc040ac51f10e21447b9a5f43f89d654c8f Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Mon, 12 Aug 2024 16:20:59 +0200 Subject: [PATCH 012/244] Drop Python 3.6 (#3434) --- .../test-integrations-cloud-computing.yml | 2 +- .../workflows/test-integrations-common.yml | 2 +- .../test-integrations-data-processing.yml | 4 +- .../workflows/test-integrations-databases.yml | 2 +- .../test-integrations-miscellaneous.yml | 4 +- .../test-integrations-networking.yml | 2 +- .../test-integrations-web-frameworks-1.yml | 2 +- .../test-integrations-web-frameworks-2.yml | 4 +- setup.py | 3 +- tox.ini | 104 ++++++++---------- 10 files changed, 59 insertions(+), 70 deletions(-) diff --git a/.github/workflows/test-integrations-cloud-computing.yml b/.github/workflows/test-integrations-cloud-computing.yml index 86ecab6f8e..178f2a74b1 100644 --- a/.github/workflows/test-integrations-cloud-computing.yml +++ b/.github/workflows/test-integrations-cloud-computing.yml @@ -85,7 +85,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.6","3.7","3.9","3.11","3.12"] + python-version: ["3.7","3.9","3.11","3.12"] # python3.6 reached EOL and is no longer being supported on # new versions of hosted runners on Github Actions # ubuntu-20.04 is the last version that supported python3.6 diff --git a/.github/workflows/test-integrations-common.yml b/.github/workflows/test-integrations-common.yml index 52baefd5b1..5b21ed27af 100644 --- a/.github/workflows/test-integrations-common.yml +++ b/.github/workflows/test-integrations-common.yml @@ -27,7 +27,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.6","3.7","3.8","3.9","3.10","3.11","3.12","3.13"] + python-version: ["3.7","3.8","3.9","3.10","3.11","3.12","3.13"] # python3.6 reached EOL and is no longer being supported on # new versions of hosted runners on Github Actions # ubuntu-20.04 is the last version that supported python3.6 diff --git a/.github/workflows/test-integrations-data-processing.yml b/.github/workflows/test-integrations-data-processing.yml index 617dc7997a..9ae3775546 100644 --- a/.github/workflows/test-integrations-data-processing.yml +++ b/.github/workflows/test-integrations-data-processing.yml @@ -27,7 +27,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.6","3.7","3.8","3.10","3.11","3.12"] + python-version: ["3.7","3.8","3.10","3.11","3.12"] # python3.6 reached EOL and is no longer being supported on # new versions of hosted runners on Github Actions # ubuntu-20.04 is the last version that supported python3.6 @@ -99,7 +99,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.6","3.7","3.8","3.9","3.10","3.11","3.12"] + python-version: ["3.7","3.8","3.9","3.10","3.11","3.12"] # python3.6 reached EOL and is no longer being supported on # new versions of hosted runners on Github Actions # ubuntu-20.04 is the last version that supported python3.6 diff --git a/.github/workflows/test-integrations-databases.yml b/.github/workflows/test-integrations-databases.yml index d740912829..a8fb56d67b 100644 --- a/.github/workflows/test-integrations-databases.yml +++ b/.github/workflows/test-integrations-databases.yml @@ -112,7 +112,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.6","3.7","3.8","3.9","3.10","3.11","3.12"] + python-version: ["3.7","3.8","3.9","3.10","3.11","3.12"] # python3.6 reached EOL and is no longer being supported on # new versions of hosted runners on Github Actions # ubuntu-20.04 is the last version that supported python3.6 diff --git 
a/.github/workflows/test-integrations-miscellaneous.yml b/.github/workflows/test-integrations-miscellaneous.yml index f5148fb2c8..4e38cfbe4b 100644 --- a/.github/workflows/test-integrations-miscellaneous.yml +++ b/.github/workflows/test-integrations-miscellaneous.yml @@ -27,7 +27,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.6","3.8","3.11","3.12"] + python-version: ["3.7","3.8","3.11","3.12"] # python3.6 reached EOL and is no longer being supported on # new versions of hosted runners on Github Actions # ubuntu-20.04 is the last version that supported python3.6 @@ -89,7 +89,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.6","3.7","3.8","3.9","3.10","3.11","3.12"] + python-version: ["3.7","3.8","3.9","3.10","3.11","3.12"] # python3.6 reached EOL and is no longer being supported on # new versions of hosted runners on Github Actions # ubuntu-20.04 is the last version that supported python3.6 diff --git a/.github/workflows/test-integrations-networking.yml b/.github/workflows/test-integrations-networking.yml index 6a55ffadd8..c0b0ee0c7b 100644 --- a/.github/workflows/test-integrations-networking.yml +++ b/.github/workflows/test-integrations-networking.yml @@ -85,7 +85,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.6","3.7","3.8","3.9","3.10","3.11","3.12"] + python-version: ["3.7","3.8","3.9","3.10","3.11","3.12"] # python3.6 reached EOL and is no longer being supported on # new versions of hosted runners on Github Actions # ubuntu-20.04 is the last version that supported python3.6 diff --git a/.github/workflows/test-integrations-web-frameworks-1.yml b/.github/workflows/test-integrations-web-frameworks-1.yml index 246248a700..d3e4e5ca08 100644 --- a/.github/workflows/test-integrations-web-frameworks-1.yml +++ b/.github/workflows/test-integrations-web-frameworks-1.yml @@ -103,7 +103,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.6","3.7","3.8","3.9","3.10","3.11","3.12"] + python-version: ["3.7","3.8","3.9","3.10","3.11","3.12"] # python3.6 reached EOL and is no longer being supported on # new versions of hosted runners on Github Actions # ubuntu-20.04 is the last version that supported python3.6 diff --git a/.github/workflows/test-integrations-web-frameworks-2.yml b/.github/workflows/test-integrations-web-frameworks-2.yml index cfc03a935a..9f6505aa10 100644 --- a/.github/workflows/test-integrations-web-frameworks-2.yml +++ b/.github/workflows/test-integrations-web-frameworks-2.yml @@ -27,7 +27,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.6","3.7","3.8","3.11","3.12"] + python-version: ["3.7","3.8","3.11","3.12"] # python3.6 reached EOL and is no longer being supported on # new versions of hosted runners on Github Actions # ubuntu-20.04 is the last version that supported python3.6 @@ -109,7 +109,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.6","3.7","3.8","3.9","3.11","3.12"] + python-version: ["3.7","3.8","3.9","3.11","3.12"] # python3.6 reached EOL and is no longer being supported on # new versions of hosted runners on Github Actions # ubuntu-20.04 is the last version that supported python3.6 diff --git a/setup.py b/setup.py index 68da68a52b..966045c60c 100644 --- a/setup.py +++ b/setup.py @@ -37,7 +37,7 @@ def get_file_text(file_name): package_data={"sentry_sdk": ["py.typed"]}, zip_safe=False, license="MIT", - python_requires=">=3.6", + python_requires=">=3.7", install_requires=[ "urllib3>=1.26.11", "certifi", @@ -91,7 +91,6 @@ def get_file_text(file_name): 
"Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", diff --git a/tox.ini b/tox.ini index e01ee4b6ae..b980ed43d2 100644 --- a/tox.ini +++ b/tox.ini @@ -9,10 +9,10 @@ requires = virtualenv<20.26.3 envlist = # === Common === - {py3.6,py3.7,py3.8,py3.9,py3.10,py3.11,py3.12,py3.13}-common + {py3.7,py3.8,py3.9,py3.10,py3.11,py3.12,py3.13}-common # === Gevent === - {py3.6,py3.8,py3.10,py3.11,py3.12}-gevent + {py3.8,py3.10,py3.11,py3.12}-gevent # === Integrations === # General format is {pythonversion}-{integrationname}-v{frameworkversion} @@ -62,24 +62,24 @@ envlist = {py3.8,py3.11}-beam-latest # Boto3 - {py3.6,py3.7}-boto3-v{1.12} + {py3.7}-boto3-v{1.12} {py3.7,py3.11,py3.12}-boto3-v{1.23} {py3.11,py3.12}-boto3-v{1.34} {py3.11,py3.12}-boto3-latest # Bottle - {py3.6,py3.9}-bottle-v{0.12} - {py3.6,py3.11,py3.12}-bottle-latest + {py3.7,py3.9}-bottle-v{0.12} + {py3.7,py3.11,py3.12}-bottle-latest # Celery - {py3.6,py3.8}-celery-v{4} - {py3.6,py3.8}-celery-v{5.0} + {py3.7,py3.8}-celery-v{4} + {py3.7,py3.8}-celery-v{5.0} {py3.7,py3.10}-celery-v{5.1,5.2} {py3.8,py3.11,py3.12}-celery-v{5.3,5.4} {py3.8,py3.11,py3.12}-celery-latest # Chalice - {py3.6,py3.9}-chalice-v{1.16} + {py3.7,py3.9}-chalice-v{1.16} {py3.8,py3.12}-chalice-latest # Clickhouse Driver @@ -87,7 +87,7 @@ envlist = {py3.8,py3.11,py3.12}-clickhouse_driver-latest # Cloud Resource Context - {py3.6,py3.11,py3.12}-cloud_resource_context + {py3.7,py3.11,py3.12}-cloud_resource_context # Cohere {py3.9,py3.11,py3.12}-cohere-v5 @@ -95,13 +95,13 @@ envlist = # Django # - Django 1.x - {py3.6,py3.7}-django-v{1.11} + {py3.7}-django-v{1.11} # - Django 2.x - {py3.6,py3.7}-django-v{2.0} - {py3.6,py3.9}-django-v{2.2} + {py3.7}-django-v{2.0} + {py3.7,py3.9}-django-v{2.2} # - Django 3.x - {py3.6,py3.9}-django-v{3.0} - {py3.6,py3.9,py3.11}-django-v{3.2} + {py3.7,py3.9}-django-v{3.0} + {py3.7,py3.9,py3.11}-django-v{3.2} # - Django 4.x {py3.8,py3.11,py3.12}-django-v{4.0,4.1,4.2} # - Django 5.x @@ -109,14 +109,14 @@ envlist = {py3.10,py3.11,py3.12}-django-latest # dramatiq - {py3.6,py3.9}-dramatiq-v{1.13} + {py3.7,py3.9}-dramatiq-v{1.13} {py3.7,py3.10,py3.11}-dramatiq-v{1.15} {py3.8,py3.11,py3.12}-dramatiq-v{1.17} {py3.8,py3.11,py3.12}-dramatiq-latest # Falcon - {py3.6,py3.7}-falcon-v{1,1.4,2} - {py3.6,py3.11,py3.12}-falcon-v{3} + {py3.7}-falcon-v{1,1.4,2} + {py3.7,py3.11,py3.12}-falcon-v{3} {py3.7,py3.11,py3.12}-falcon-latest # FastAPI @@ -124,7 +124,7 @@ envlist = {py3.8,py3.11,py3.12}-fastapi-latest # Flask - {py3.6,py3.8}-flask-v{1} + {py3.7,py3.8}-flask-v{1} {py3.8,py3.11,py3.12}-flask-v{2} {py3.10,py3.11,py3.12}-flask-v{3} {py3.10,py3.11,py3.12}-flask-latest @@ -147,15 +147,15 @@ envlist = {py3.8,py3.11,py3.12}-grpc-latest # HTTPX - {py3.6,py3.9}-httpx-v{0.16,0.18} - {py3.6,py3.10}-httpx-v{0.20,0.22} + {py3.7,py3.9}-httpx-v{0.16,0.18} + {py3.7,py3.10}-httpx-v{0.20,0.22} {py3.7,py3.11,py3.12}-httpx-v{0.23,0.24} {py3.9,py3.11,py3.12}-httpx-v{0.25,0.27} {py3.9,py3.11,py3.12}-httpx-latest # Huey - {py3.6,py3.11,py3.12}-huey-v{2.0} - {py3.6,py3.11,py3.12}-huey-latest + {py3.7,py3.11,py3.12}-huey-v{2.0} + {py3.7,py3.11,py3.12}-huey-latest # Huggingface Hub {py3.9,py3.11,py3.12}-huggingface_hub-{v0.22,latest} @@ -174,8 +174,8 @@ envlist = {py3.8,py3.11,py3.12}-litestar-latest # Loguru - {py3.6,py3.11,py3.12}-loguru-v{0.5} - 
{py3.6,py3.11,py3.12}-loguru-latest + {py3.7,py3.11,py3.12}-loguru-v{0.5} + {py3.7,py3.11,py3.12}-loguru-latest # OpenAI {py3.9,py3.11,py3.12}-openai-v1 @@ -190,20 +190,20 @@ envlist = {py3.8,py3.9,py3.10,py3.11}-potel # pure_eval - {py3.6,py3.11,py3.12}-pure_eval + {py3.7,py3.11,py3.12}-pure_eval # PyMongo (Mongo DB) - {py3.6}-pymongo-v{3.1} - {py3.6,py3.9}-pymongo-v{3.12} - {py3.6,py3.11}-pymongo-v{4.0} + {py3.7}-pymongo-v{3.7} + {py3.7,py3.9}-pymongo-v{3.12} + {py3.7,py3.11}-pymongo-v{4.0} {py3.7,py3.11,py3.12}-pymongo-v{4.3,4.7} {py3.7,py3.11,py3.12}-pymongo-latest # Pyramid - {py3.6,py3.11}-pyramid-v{1.6} - {py3.6,py3.11,py3.12}-pyramid-v{1.10} - {py3.6,py3.11,py3.12}-pyramid-v{2.0} - {py3.6,py3.11,py3.12}-pyramid-latest + {py3.7,py3.11}-pyramid-v{1.6} + {py3.7,py3.11,py3.12}-pyramid-v{1.10} + {py3.7,py3.11,py3.12}-pyramid-v{2.0} + {py3.7,py3.11,py3.12}-pyramid-latest # Quart {py3.7,py3.11}-quart-v{0.16} @@ -211,28 +211,27 @@ envlist = {py3.8,py3.11,py3.12}-quart-latest # Redis - {py3.6,py3.8}-redis-v{3} + {py3.7,py3.8}-redis-v{3} {py3.7,py3.8,py3.11}-redis-v{4} {py3.7,py3.11,py3.12}-redis-v{5} {py3.7,py3.11,py3.12}-redis-latest # Redis Cluster - {py3.6,py3.8}-redis_py_cluster_legacy-v{1,2} + {py3.7,py3.8}-redis_py_cluster_legacy-v{1,2} # no -latest, not developed anymore # Requests - {py3.6,py3.8,py3.11,py3.12}-requests + {py3.7,py3.8,py3.11,py3.12}-requests # RQ (Redis Queue) - {py3.6}-rq-v{0.6} - {py3.6,py3.9}-rq-v{0.13,1.0} - {py3.6,py3.11}-rq-v{1.5,1.10} + {py3.7,py3.9}-rq-v{0.13,1.0} + {py3.7,py3.11}-rq-v{1.5,1.10} {py3.7,py3.11,py3.12}-rq-v{1.15,1.16} {py3.7,py3.11,py3.12}-rq-latest # Sanic - {py3.6,py3.7}-sanic-v{0.8} - {py3.6,py3.8}-sanic-v{20} + {py3.7}-sanic-v{0.8} + {py3.8}-sanic-v{20} {py3.7,py3.11}-sanic-v{22} {py3.7,py3.11}-sanic-v{23} {py3.8,py3.11}-sanic-latest @@ -252,7 +251,7 @@ envlist = # 1.51.14 is the last starlite version; the project continues as litestar # SQL Alchemy - {py3.6,py3.9}-sqlalchemy-v{1.2,1.4} + {py3.7,py3.9}-sqlalchemy-v{1.2,1.4} {py3.7,py3.11}-sqlalchemy-v{2.0} {py3.7,py3.11,py3.12}-sqlalchemy-latest @@ -267,9 +266,8 @@ envlist = {py3.8,py3.11,py3.12}-tornado-latest # Trytond - {py3.6}-trytond-v{4} - {py3.6,py3.8}-trytond-v{5} - {py3.6,py3.11}-trytond-v{6} + {py3.7,py3.8}-trytond-v{5} + {py3.7,py3.11}-trytond-v{6} {py3.8,py3.11,py3.12}-trytond-v{7} {py3.8,py3.11,py3.12}-trytond-latest @@ -285,20 +283,20 @@ deps = # === Common === py3.8-common: hypothesis - {py3.6,py3.7,py3.8,py3.9,py3.10,py3.11,py3.12,py3.13}-common: pytest-asyncio + {py3.7,py3.8,py3.9,py3.10,py3.11,py3.12,py3.13}-common: pytest-asyncio # See https://github.com/pytest-dev/pytest/issues/9621 # and https://github.com/pytest-dev/pytest-forked/issues/67 # for justification of the upper bound on pytest - {py3.6,py3.7,py3.8,py3.9,py3.10,py3.11,py3.12}-common: pytest<7.0.0 + {py3.7,py3.8,py3.9,py3.10,py3.11,py3.12}-common: pytest<7.0.0 py3.13-common: pytest # === Gevent === - {py3.6,py3.7,py3.8,py3.9,py3.10,py3.11}-gevent: gevent>=22.10.0, <22.11.0 + {py3.7,py3.8,py3.9,py3.10,py3.11}-gevent: gevent>=22.10.0, <22.11.0 {py3.12}-gevent: gevent # See https://github.com/pytest-dev/pytest/issues/9621 # and https://github.com/pytest-dev/pytest-forked/issues/67 # for justification of the upper bound on pytest - {py3.6,py3.7,py3.8,py3.9,py3.10,py3.11,py3.12}-gevent: pytest<7.0.0 + {py3.7,py3.8,py3.9,py3.10,py3.11,py3.12}-gevent: pytest<7.0.0 # === Integrations === @@ -368,7 +366,7 @@ deps = celery-latest: Celery {py3.7}-celery: importlib-metadata<5.0 - 
{py3.6,py3.7,py3.8,py3.9,py3.10,py3.11,py3.12}-celery: newrelic + {py3.7,py3.8,py3.9,py3.10,py3.11,py3.12}-celery: newrelic # Chalice chalice-v1.16: chalice~=1.16.0 @@ -584,13 +582,9 @@ deps = requests: requests>=2.0 # RQ (Redis Queue) - # https://github.com/jamesls/fakeredis/issues/245 - rq-v{0.6}: fakeredis<1.0 - rq-v{0.6}: redis<3.2.2 rq-v{0.13,1.0,1.5,1.10}: fakeredis>=1.0,<1.7.4 rq-v{1.15,1.16}: fakeredis rq-latest: fakeredis - rq-v0.6: rq~=0.6.0 rq-v0.13: rq~=0.13.0 rq-v1.0: rq~=1.0.0 rq-v1.5: rq~=1.5.0 @@ -604,7 +598,6 @@ deps = sanic: aiohttp sanic-v{22,23}: sanic_testing sanic-latest: sanic_testing - {py3.6}-sanic: aiocontextvars==0.2.1 sanic-v0.8: sanic~=0.8.0 sanic-v20: sanic~=20.0 sanic-v22: sanic~=22.0 @@ -664,8 +657,6 @@ deps = # Trytond trytond: werkzeug - trytond-v4: werkzeug<1.0 - trytond-v4: trytond~=4.0 trytond-v5: trytond~=5.0 trytond-v6: trytond~=6.0 trytond-v7: trytond~=7.0 @@ -748,7 +739,6 @@ extras = pymongo: pymongo basepython = - py3.6: python3.6 py3.7: python3.7 py3.8: python3.8 py3.9: python3.9 From 3c5f761d15ea15f35bbed9e92e9be61e65fb839c Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Wed, 14 Aug 2024 10:18:43 +0200 Subject: [PATCH 013/244] Remove `Hub` and related code for good. (#3446) * Removed hub based functions from sessions implementation * Removed scope manager * Removed hub from tracing * Removed hub from apidocs * Updated migration guide * Updated migration guide --- MIGRATION_GUIDE.md | 12 + docs/apidocs.rst | 3 - sentry_sdk/__init__.py | 4 - sentry_sdk/hub.py | 736 ------------------ sentry_sdk/integrations/sanic.py | 2 +- sentry_sdk/profiler/transaction_profiler.py | 32 - sentry_sdk/scope.py | 22 - sentry_sdk/sessions.py | 87 --- sentry_sdk/tracing.py | 69 -- sentry_sdk/utils.py | 6 +- tests/integrations/conftest.py | 12 - tests/new_scopes_compat/__init__.py | 7 - tests/new_scopes_compat/conftest.py | 8 - .../test_new_scopes_compat.py | 217 ------ tests/profiler/test_transaction_profiler.py | 19 - tests/test_basics.py | 19 - tests/test_sessions.py | 107 +-- tests/tracing/test_decorator.py | 1 + tests/tracing/test_deprecated.py | 59 -- 19 files changed, 21 insertions(+), 1401 deletions(-) delete mode 100644 sentry_sdk/hub.py delete mode 100644 tests/new_scopes_compat/__init__.py delete mode 100644 tests/new_scopes_compat/conftest.py delete mode 100644 tests/new_scopes_compat/test_new_scopes_compat.py delete mode 100644 tests/tracing/test_deprecated.py diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index df3ee6ea7d..c2e071a078 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -9,8 +9,20 @@ Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of wh ### Changed +- The `Span()` constructor does not accept a `hub` parameter anymore. +- `Span.finish()` does not accept a `hub` parameter anymore. +- The `Profile()` constructor does not accept a `hub` parameter anymore. +- A `Profile` object does not have a `.hub` property anymore. + ### Removed +- Class `Hub` has been removed. +- Class `_ScopeManager` has been removed. +- The context manager `auto_session_tracking()` has been removed. Use `track_session()` instead. +- The context manager `auto_session_tracking_scope()` has been removed. Use `track_session()` instead. +- Utility function `is_auto_session_tracking_enabled()` has been removed. There is no public replacement. There is a private `_is_auto_session_tracking_enabled()` (if you absolutely need this function) It accepts a `scope` parameter instead of the previously used `hub` parameter. 
+- Utility function `is_auto_session_tracking_enabled_scope()` has been removed. There is no public replacement. There is a private `_is_auto_session_tracking_enabled()` (if you absolutely need this function) + ### Deprecated diff --git a/docs/apidocs.rst b/docs/apidocs.rst index a3c8a6e150..ffe265b276 100644 --- a/docs/apidocs.rst +++ b/docs/apidocs.rst @@ -2,9 +2,6 @@ API Docs ======== -.. autoclass:: sentry_sdk.Hub - :members: - .. autoclass:: sentry_sdk.Scope :members: diff --git a/sentry_sdk/__init__.py b/sentry_sdk/__init__.py index 8ff8dc703e..90cb01ec17 100644 --- a/sentry_sdk/__init__.py +++ b/sentry_sdk/__init__.py @@ -7,7 +7,6 @@ from sentry_sdk.consts import VERSION # noqa __all__ = [ # noqa - "Hub", "Scope", "Client", "Transport", @@ -50,6 +49,3 @@ init_debug_support() del init_debug_support - -# circular imports -from sentry_sdk.hub import Hub diff --git a/sentry_sdk/hub.py b/sentry_sdk/hub.py deleted file mode 100644 index 1493c53992..0000000000 --- a/sentry_sdk/hub.py +++ /dev/null @@ -1,736 +0,0 @@ -import warnings - -# Importing sentry_sdk.consts here prevents a circular import, even though it's not used in this file. -import sentry_sdk.consts # noqa: F401 - -from contextlib import contextmanager - -from sentry_sdk import ( - get_client, - get_global_scope, - get_isolation_scope, - get_current_scope, -) -from sentry_sdk._compat import with_metaclass -from sentry_sdk.scope import _ScopeManager -from sentry_sdk.client import Client -from sentry_sdk.tracing import ( - NoOpSpan, - Span, - Transaction, -) - -from sentry_sdk.utils import ( - logger, - ContextVar, -) - -from sentry_sdk._types import TYPE_CHECKING - -if TYPE_CHECKING: - from typing import Any - from typing import Callable - from typing import ContextManager - from typing import Dict - from typing import Generator - from typing import List - from typing import Optional - from typing import overload - from typing import Tuple - from typing import Type - from typing import TypeVar - from typing import Union - - from typing_extensions import Unpack - - from sentry_sdk.scope import Scope - from sentry_sdk.client import BaseClient - from sentry_sdk.integrations import Integration - from sentry_sdk._types import ( - Event, - Hint, - Breadcrumb, - BreadcrumbHint, - ExcInfo, - LogLevelStr, - SamplingContext, - ) - from sentry_sdk.tracing import TransactionKwargs - - T = TypeVar("T") - -else: - - def overload(x): - # type: (T) -> T - return x - - -class SentryHubDeprecationWarning(DeprecationWarning): - """ - A custom deprecation warning to inform users that the Hub is deprecated. - """ - - _MESSAGE = ( - "`sentry_sdk.Hub` is deprecated and will be removed in a future major release. 
" - "Please consult our 1.x to 2.x migration guide for details on how to migrate " - "`Hub` usage to the new API: " - "https://docs.sentry.io/platforms/python/migration/1.x-to-2.x" - ) - - def __init__(self, *_): - # type: (*object) -> None - super().__init__(self._MESSAGE) - - -@contextmanager -def _suppress_hub_deprecation_warning(): - # type: () -> Generator[None, None, None] - """Utility function to suppress deprecation warnings for the Hub.""" - with warnings.catch_warnings(): - warnings.filterwarnings("ignore", category=SentryHubDeprecationWarning) - yield - - -_local = ContextVar("sentry_current_hub") - - -class HubMeta(type): - @property - def current(cls): - # type: () -> Hub - """Returns the current instance of the hub.""" - warnings.warn(SentryHubDeprecationWarning(), stacklevel=2) - rv = _local.get(None) - if rv is None: - with _suppress_hub_deprecation_warning(): - # This will raise a deprecation warning; supress it since we already warned above. - rv = Hub(GLOBAL_HUB) - _local.set(rv) - return rv - - @property - def main(cls): - # type: () -> Hub - """Returns the main instance of the hub.""" - warnings.warn(SentryHubDeprecationWarning(), stacklevel=2) - return GLOBAL_HUB - - -class Hub(with_metaclass(HubMeta)): # type: ignore - """ - .. deprecated:: 2.0.0 - The Hub is deprecated. Its functionality will be merged into :py:class:`sentry_sdk.scope.Scope`. - - The hub wraps the concurrency management of the SDK. Each thread has - its own hub but the hub might transfer with the flow of execution if - context vars are available. - - If the hub is used with a with statement it's temporarily activated. - """ - - _stack = None # type: List[Tuple[Optional[Client], Scope]] - _scope = None # type: Optional[Scope] - - # Mypy doesn't pick up on the metaclass. - - if TYPE_CHECKING: - current = None # type: Hub - main = None # type: Hub - - def __init__( - self, - client_or_hub=None, # type: Optional[Union[Hub, Client]] - scope=None, # type: Optional[Any] - ): - # type: (...) 
-> None - warnings.warn(SentryHubDeprecationWarning(), stacklevel=2) - - current_scope = None - - if isinstance(client_or_hub, Hub): - client = get_client() - if scope is None: - # hub cloning is going on, we use a fork of the current/isolation scope for context manager - scope = get_isolation_scope().fork() - current_scope = get_current_scope().fork() - else: - client = client_or_hub # type: ignore - get_global_scope().set_client(client) - - if scope is None: # so there is no Hub cloning going on - # just the current isolation scope is used for context manager - scope = get_isolation_scope() - current_scope = get_current_scope() - - if current_scope is None: - # just the current current scope is used for context manager - current_scope = get_current_scope() - - self._stack = [(client, scope)] # type: ignore - self._last_event_id = None # type: Optional[str] - self._old_hubs = [] # type: List[Hub] - - self._old_current_scopes = [] # type: List[Scope] - self._old_isolation_scopes = [] # type: List[Scope] - self._current_scope = current_scope # type: Scope - self._scope = scope # type: Scope - - def __enter__(self): - # type: () -> Hub - self._old_hubs.append(Hub.current) - _local.set(self) - - current_scope = get_current_scope() - self._old_current_scopes.append(current_scope) - scope._current_scope.set(self._current_scope) - - isolation_scope = get_isolation_scope() - self._old_isolation_scopes.append(isolation_scope) - scope._isolation_scope.set(self._scope) - - return self - - def __exit__( - self, - exc_type, # type: Optional[type] - exc_value, # type: Optional[BaseException] - tb, # type: Optional[Any] - ): - # type: (...) -> None - old = self._old_hubs.pop() - _local.set(old) - - old_current_scope = self._old_current_scopes.pop() - scope._current_scope.set(old_current_scope) - - old_isolation_scope = self._old_isolation_scopes.pop() - scope._isolation_scope.set(old_isolation_scope) - - def run( - self, callback # type: Callable[[], T] - ): - # type: (...) -> T - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - - Runs a callback in the context of the hub. Alternatively the - with statement can be used on the hub directly. - """ - with self: - return callback() - - def get_integration( - self, name_or_class # type: Union[str, Type[Integration]] - ): - # type: (...) -> Any - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - Please use :py:meth:`sentry_sdk.client._Client.get_integration` instead. - - Returns the integration for this hub by name or class. If there - is no client bound or the client does not have that integration - then `None` is returned. - - If the return value is not `None` the hub is guaranteed to have a - client attached. - """ - return get_client().get_integration(name_or_class) - - @property - def client(self): - # type: () -> Optional[BaseClient] - """ - .. deprecated:: 2.0.0 - This property is deprecated and will be removed in a future release. - Please use :py:func:`sentry_sdk.api.get_client` instead. - - Returns the current client on the hub. - """ - client = get_client() - - if not client.is_active(): - return None - - return client - - @property - def scope(self): - # type: () -> Scope - """ - .. deprecated:: 2.0.0 - This property is deprecated and will be removed in a future release. - Returns the current scope on the hub. - """ - return get_isolation_scope() - - def last_event_id(self): - # type: () -> Optional[str] - """ - Returns the last event ID. - - .. 
deprecated:: 1.40.5 - This function is deprecated and will be removed in a future release. The functions `capture_event`, `capture_message`, and `capture_exception` return the event ID directly. - """ - logger.warning( - "Deprecated: last_event_id is deprecated. This will be removed in the future. The functions `capture_event`, `capture_message`, and `capture_exception` return the event ID directly." - ) - return self._last_event_id - - def bind_client( - self, new # type: Optional[BaseClient] - ): - # type: (...) -> None - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - Please use :py:meth:`sentry_sdk.Scope.set_client` instead. - - Binds a new client to the hub. - """ - get_global_scope().set_client(new) - - def capture_event(self, event, hint=None, scope=None, **scope_kwargs): - # type: (Event, Optional[Hint], Optional[Scope], Any) -> Optional[str] - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - Please use :py:meth:`sentry_sdk.Scope.capture_event` instead. - - Captures an event. - - Alias of :py:meth:`sentry_sdk.Scope.capture_event`. - - :param event: A ready-made event that can be directly sent to Sentry. - - :param hint: Contains metadata about the event that can be read from `before_send`, such as the original exception object or a HTTP request object. - - :param scope: An optional :py:class:`sentry_sdk.Scope` to apply to events. - The `scope` and `scope_kwargs` parameters are mutually exclusive. - - :param scope_kwargs: Optional data to apply to event. - For supported `**scope_kwargs` see :py:meth:`sentry_sdk.Scope.update_from_kwargs`. - The `scope` and `scope_kwargs` parameters are mutually exclusive. - """ - last_event_id = get_current_scope().capture_event( - event, hint, scope=scope, **scope_kwargs - ) - - is_transaction = event.get("type") == "transaction" - if last_event_id is not None and not is_transaction: - self._last_event_id = last_event_id - - return last_event_id - - def capture_message(self, message, level=None, scope=None, **scope_kwargs): - # type: (str, Optional[LogLevelStr], Optional[Scope], Any) -> Optional[str] - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - Please use :py:meth:`sentry_sdk.Scope.capture_message` instead. - - Captures a message. - - Alias of :py:meth:`sentry_sdk.Scope.capture_message`. - - :param message: The string to send as the message to Sentry. - - :param level: If no level is provided, the default level is `info`. - - :param scope: An optional :py:class:`sentry_sdk.Scope` to apply to events. - The `scope` and `scope_kwargs` parameters are mutually exclusive. - - :param scope_kwargs: Optional data to apply to event. - For supported `**scope_kwargs` see :py:meth:`sentry_sdk.Scope.update_from_kwargs`. - The `scope` and `scope_kwargs` parameters are mutually exclusive. - - :returns: An `event_id` if the SDK decided to send the event (see :py:meth:`sentry_sdk.client._Client.capture_event`). - """ - last_event_id = get_current_scope().capture_message( - message, level=level, scope=scope, **scope_kwargs - ) - - if last_event_id is not None: - self._last_event_id = last_event_id - - return last_event_id - - def capture_exception(self, error=None, scope=None, **scope_kwargs): - # type: (Optional[Union[BaseException, ExcInfo]], Optional[Scope], Any) -> Optional[str] - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. 
- Please use :py:meth:`sentry_sdk.Scope.capture_exception` instead. - - Captures an exception. - - Alias of :py:meth:`sentry_sdk.Scope.capture_exception`. - - :param error: An exception to capture. If `None`, `sys.exc_info()` will be used. - - :param scope: An optional :py:class:`sentry_sdk.Scope` to apply to events. - The `scope` and `scope_kwargs` parameters are mutually exclusive. - - :param scope_kwargs: Optional data to apply to event. - For supported `**scope_kwargs` see :py:meth:`sentry_sdk.Scope.update_from_kwargs`. - The `scope` and `scope_kwargs` parameters are mutually exclusive. - - :returns: An `event_id` if the SDK decided to send the event (see :py:meth:`sentry_sdk.client._Client.capture_event`). - """ - last_event_id = get_current_scope().capture_exception( - error, scope=scope, **scope_kwargs - ) - - if last_event_id is not None: - self._last_event_id = last_event_id - - return last_event_id - - def add_breadcrumb(self, crumb=None, hint=None, **kwargs): - # type: (Optional[Breadcrumb], Optional[BreadcrumbHint], Any) -> None - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - Please use :py:meth:`sentry_sdk.Scope.add_breadcrumb` instead. - - Adds a breadcrumb. - - :param crumb: Dictionary with the data as the sentry v7/v8 protocol expects. - - :param hint: An optional value that can be used by `before_breadcrumb` - to customize the breadcrumbs that are emitted. - """ - get_isolation_scope().add_breadcrumb(crumb, hint, **kwargs) - - def start_span(self, **kwargs): - # type: (Any) -> Span - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - Please use :py:meth:`sentry_sdk.Scope.start_span` instead. - - Start a span whose parent is the currently active span or transaction, if any. - - The return value is a :py:class:`sentry_sdk.tracing.Span` instance, - typically used as a context manager to start and stop timing in a `with` - block. - - Only spans contained in a transaction are sent to Sentry. Most - integrations start a transaction at the appropriate time, for example - for every incoming HTTP request. Use - :py:meth:`sentry_sdk.start_transaction` to start a new transaction when - one is not already in progress. - - For supported `**kwargs` see :py:class:`sentry_sdk.tracing.Span`. - """ - scope = get_current_scope() - return scope.start_span(**kwargs) - - def start_transaction( - self, transaction=None, custom_sampling_context=None, **kwargs - ): - # type: (Optional[Transaction], Optional[SamplingContext], Unpack[TransactionKwargs]) -> Union[Transaction, NoOpSpan] - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - Please use :py:meth:`sentry_sdk.Scope.start_transaction` instead. - - Start and return a transaction. - - Start an existing transaction if given, otherwise create and start a new - transaction with kwargs. - - This is the entry point to manual tracing instrumentation. - - A tree structure can be built by adding child spans to the transaction, - and child spans to other spans. To start a new child span within the - transaction or any span, call the respective `.start_child()` method. - - Every child span must be finished before the transaction is finished, - otherwise the unfinished spans are discarded. - - When used as context managers, spans and transactions are automatically - finished at the end of the `with` block. If not using context managers, - call the `.finish()` method. 
- - When the transaction is finished, it will be sent to Sentry with all its - finished child spans. - - For supported `**kwargs` see :py:class:`sentry_sdk.tracing.Transaction`. - """ - scope = get_current_scope() - - # For backwards compatibility, we allow passing the scope as the hub. - # We need a major release to make this nice. (if someone searches the code: deprecated) - # Type checking disabled for this line because deprecated keys are not allowed in the type signature. - kwargs["hub"] = scope # type: ignore - - return scope.start_transaction(transaction, custom_sampling_context, **kwargs) - - def continue_trace(self, environ_or_headers, op=None, name=None, source=None): - # type: (Dict[str, Any], Optional[str], Optional[str], Optional[str]) -> Transaction - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - Please use :py:meth:`sentry_sdk.Scope.continue_trace` instead. - - Sets the propagation context from environment or headers and returns a transaction. - """ - return get_isolation_scope().continue_trace( - environ_or_headers=environ_or_headers, op=op, name=name, source=source - ) - - @overload - def push_scope( - self, callback=None # type: Optional[None] - ): - # type: (...) -> ContextManager[Scope] - pass - - @overload - def push_scope( # noqa: F811 - self, callback # type: Callable[[Scope], None] - ): - # type: (...) -> None - pass - - def push_scope( # noqa - self, - callback=None, # type: Optional[Callable[[Scope], None]] - continue_trace=True, # type: bool - ): - # type: (...) -> Optional[ContextManager[Scope]] - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - - Pushes a new layer on the scope stack. - - :param callback: If provided, this method pushes a scope, calls - `callback`, and pops the scope again. - - :returns: If no `callback` is provided, a context manager that should - be used to pop the scope again. - """ - if callback is not None: - with self.push_scope() as scope: - callback(scope) - return None - - return _ScopeManager(self) - - def pop_scope_unsafe(self): - # type: () -> Tuple[Optional[Client], Scope] - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - - Pops a scope layer from the stack. - - Try to use the context manager :py:meth:`push_scope` instead. - """ - rv = self._stack.pop() - assert self._stack, "stack must have at least one layer" - return rv - - @overload - def configure_scope( - self, callback=None # type: Optional[None] - ): - # type: (...) -> ContextManager[Scope] - pass - - @overload - def configure_scope( # noqa: F811 - self, callback # type: Callable[[Scope], None] - ): - # type: (...) -> None - pass - - def configure_scope( # noqa - self, - callback=None, # type: Optional[Callable[[Scope], None]] - continue_trace=True, # type: bool - ): - # type: (...) -> Optional[ContextManager[Scope]] - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - - Reconfigures the scope. - - :param callback: If provided, call the callback with the current scope. - - :returns: If no callback is provided, returns a context manager that returns the scope. - """ - scope = get_isolation_scope() - - if continue_trace: - scope.generate_propagation_context() - - if callback is not None: - # TODO: used to return None when client is None. Check if this changes behavior. 
- callback(scope) - - return None - - @contextmanager - def inner(): - # type: () -> Generator[Scope, None, None] - yield scope - - return inner() - - def start_session( - self, session_mode="application" # type: str - ): - # type: (...) -> None - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - Please use :py:meth:`sentry_sdk.Scope.start_session` instead. - - Starts a new session. - """ - get_isolation_scope().start_session( - session_mode=session_mode, - ) - - def end_session(self): - # type: (...) -> None - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - Please use :py:meth:`sentry_sdk.Scope.end_session` instead. - - Ends the current session if there is one. - """ - get_isolation_scope().end_session() - - def stop_auto_session_tracking(self): - # type: (...) -> None - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - Please use :py:meth:`sentry_sdk.Scope.stop_auto_session_tracking` instead. - - Stops automatic session tracking. - - This temporarily session tracking for the current scope when called. - To resume session tracking call `resume_auto_session_tracking`. - """ - get_isolation_scope().stop_auto_session_tracking() - - def resume_auto_session_tracking(self): - # type: (...) -> None - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - Please use :py:meth:`sentry_sdk.Scope.resume_auto_session_tracking` instead. - - Resumes automatic session tracking for the current scope if - disabled earlier. This requires that generally automatic session - tracking is enabled. - """ - get_isolation_scope().resume_auto_session_tracking() - - def flush( - self, - timeout=None, # type: Optional[float] - callback=None, # type: Optional[Callable[[int, float], None]] - ): - # type: (...) -> None - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - Please use :py:meth:`sentry_sdk.client._Client.flush` instead. - - Alias for :py:meth:`sentry_sdk.client._Client.flush` - """ - return get_client().flush(timeout=timeout, callback=callback) - - def get_traceparent(self): - # type: () -> Optional[str] - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - Please use :py:meth:`sentry_sdk.Scope.get_traceparent` instead. - - Returns the traceparent either from the active span or from the scope. - """ - current_scope = get_current_scope() - traceparent = current_scope.get_traceparent() - - if traceparent is None: - isolation_scope = get_isolation_scope() - traceparent = isolation_scope.get_traceparent() - - return traceparent - - def get_baggage(self): - # type: () -> Optional[str] - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - Please use :py:meth:`sentry_sdk.Scope.get_baggage` instead. - - Returns Baggage either from the active span or from the scope. - """ - current_scope = get_current_scope() - baggage = current_scope.get_baggage() - - if baggage is None: - isolation_scope = get_isolation_scope() - baggage = isolation_scope.get_baggage() - - if baggage is not None: - return baggage.serialize() - - return None - - def iter_trace_propagation_headers(self, span=None): - # type: (Optional[Span]) -> Generator[Tuple[str, str], None, None] - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. 
- Please use :py:meth:`sentry_sdk.Scope.iter_trace_propagation_headers` instead. - - Return HTTP headers which allow propagation of trace data. Data taken - from the span representing the request, if available, or the current - span on the scope if not. - """ - return get_current_scope().iter_trace_propagation_headers( - span=span, - ) - - def trace_propagation_meta(self, span=None): - # type: (Optional[Span]) -> str - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - Please use :py:meth:`sentry_sdk.Scope.trace_propagation_meta` instead. - - Return meta tags which should be injected into HTML templates - to allow propagation of trace information. - """ - if span is not None: - logger.warning( - "The parameter `span` in trace_propagation_meta() is deprecated and will be removed in the future." - ) - - return get_current_scope().trace_propagation_meta( - span=span, - ) - - -with _suppress_hub_deprecation_warning(): - # Suppress deprecation warning for the Hub here, since we still always - # import this module. - GLOBAL_HUB = Hub() -_local.set(GLOBAL_HUB) - - -# Circular imports -from sentry_sdk import scope diff --git a/sentry_sdk/integrations/sanic.py b/sentry_sdk/integrations/sanic.py index 36e3b4c892..ee299215c9 100644 --- a/sentry_sdk/integrations/sanic.py +++ b/sentry_sdk/integrations/sanic.py @@ -218,7 +218,7 @@ async def _context_exit(request, response=None): response_status = None if response is None else response.status # This capture_internal_exceptions block has been intentionally nested here, so that in case an exception - # happens while trying to end the transaction, we still attempt to exit the hub. + # happens while trying to end the transaction, we still attempt to exit the scope. with capture_internal_exceptions(): request.ctx._sentry_transaction.set_http_status(response_status) request.ctx._sentry_transaction.sampled &= ( diff --git a/sentry_sdk/profiler/transaction_profiler.py b/sentry_sdk/profiler/transaction_profiler.py index 6ed983fb59..4ccad4f298 100644 --- a/sentry_sdk/profiler/transaction_profiler.py +++ b/sentry_sdk/profiler/transaction_profiler.py @@ -33,7 +33,6 @@ import threading import time import uuid -import warnings from abc import ABC, abstractmethod from collections import deque @@ -209,7 +208,6 @@ def __init__( self, sampled, # type: Optional[bool] start_ns, # type: int - hub=None, # type: Optional[sentry_sdk.Hub] scheduler=None, # type: Optional[Scheduler] ): # type: (...) -> None @@ -240,16 +238,6 @@ def __init__( self.unique_samples = 0 - # Backwards compatibility with the old hub property - self._hub = None # type: Optional[sentry_sdk.Hub] - if hub is not None: - self._hub = hub - warnings.warn( - "The `hub` parameter is deprecated. Please do not use it.", - DeprecationWarning, - stacklevel=2, - ) - def update_active_thread_id(self): # type: () -> None self.active_thread_id = get_current_thread_meta()[0] @@ -516,26 +504,6 @@ def valid(self): return True - @property - def hub(self): - # type: () -> Optional[sentry_sdk.Hub] - warnings.warn( - "The `hub` attribute is deprecated. Please do not access it.", - DeprecationWarning, - stacklevel=2, - ) - return self._hub - - @hub.setter - def hub(self, value): - # type: (Optional[sentry_sdk.Hub]) -> None - warnings.warn( - "The `hub` attribute is deprecated. 
Please do not set it.", - DeprecationWarning, - stacklevel=2, - ) - self._hub = value - class Scheduler(ABC): mode = "unknown" # type: ProfilerMode diff --git a/sentry_sdk/scope.py b/sentry_sdk/scope.py index 0c475d22b7..111c28dc7f 100644 --- a/sentry_sdk/scope.py +++ b/sentry_sdk/scope.py @@ -105,28 +105,6 @@ class ScopeType(Enum): MERGED = "merged" -class _ScopeManager: - def __init__(self, hub=None): - # type: (Optional[Any]) -> None - self._old_scopes = [] # type: List[Scope] - - def __enter__(self): - # type: () -> Scope - isolation_scope = Scope.get_isolation_scope() - - self._old_scopes.append(isolation_scope) - - forked_scope = isolation_scope.fork() - _isolation_scope.set(forked_scope) - - return forked_scope - - def __exit__(self, exc_type, exc_value, tb): - # type: (Any, Any, Any) -> None - old_scope = self._old_scopes.pop() - _isolation_scope.set(old_scope) - - def add_global_event_processor(processor): # type: (EventProcessor) -> None global_event_processors.append(processor) diff --git a/sentry_sdk/sessions.py b/sentry_sdk/sessions.py index 66bbdfd5ec..77e9051550 100644 --- a/sentry_sdk/sessions.py +++ b/sentry_sdk/sessions.py @@ -1,6 +1,5 @@ import os import time -import warnings from threading import Thread, Lock from contextlib import contextmanager @@ -17,75 +16,6 @@ from typing import Generator from typing import List from typing import Optional - from typing import Union - - -def is_auto_session_tracking_enabled(hub=None): - # type: (Optional[sentry_sdk.Hub]) -> Union[Any, bool, None] - """DEPRECATED: Utility function to find out if session tracking is enabled.""" - - # Internal callers should use private _is_auto_session_tracking_enabled, instead. - warnings.warn( - "This function is deprecated and will be removed in the next major release. " - "There is no public API replacement.", - DeprecationWarning, - stacklevel=2, - ) - - if hub is None: - hub = sentry_sdk.Hub.current - - should_track = hub.scope._force_auto_session_tracking - - if should_track is None: - client_options = hub.client.options if hub.client else {} - should_track = client_options.get("auto_session_tracking", False) - - return should_track - - -@contextmanager -def auto_session_tracking(hub=None, session_mode="application"): - # type: (Optional[sentry_sdk.Hub], str) -> Generator[None, None, None] - """DEPRECATED: Use track_session instead - Starts and stops a session automatically around a block. - """ - warnings.warn( - "This function is deprecated and will be removed in the next major release. " - "Use track_session instead.", - DeprecationWarning, - stacklevel=2, - ) - - if hub is None: - hub = sentry_sdk.Hub.current - with warnings.catch_warnings(): - warnings.simplefilter("ignore", DeprecationWarning) - should_track = is_auto_session_tracking_enabled(hub) - if should_track: - hub.start_session(session_mode=session_mode) - try: - yield - finally: - if should_track: - hub.end_session() - - -def is_auto_session_tracking_enabled_scope(scope): - # type: (sentry_sdk.Scope) -> bool - """ - DEPRECATED: Utility function to find out if session tracking is enabled. - """ - - warnings.warn( - "This function is deprecated and will be removed in the next major release. " - "There is no public API replacement.", - DeprecationWarning, - stacklevel=2, - ) - - # Internal callers should use private _is_auto_session_tracking_enabled, instead. 
- return _is_auto_session_tracking_enabled(scope) def _is_auto_session_tracking_enabled(scope): @@ -102,23 +32,6 @@ def _is_auto_session_tracking_enabled(scope): return should_track -@contextmanager -def auto_session_tracking_scope(scope, session_mode="application"): - # type: (sentry_sdk.Scope, str) -> Generator[None, None, None] - """DEPRECATED: This function is a deprecated alias for track_session. - Starts and stops a session automatically around a block. - """ - - warnings.warn( - "This function is a deprecated alias for track_session and will be removed in the next major release.", - DeprecationWarning, - stacklevel=2, - ) - - with track_session(scope, session_mode=session_mode): - yield - - @contextmanager def track_session(scope, session_mode="application"): # type: (sentry_sdk.Scope, str) -> Generator[None, None, None] diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 484320ccbd..88ea7f55ff 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -1,6 +1,5 @@ import uuid import random -import warnings from datetime import datetime, timedelta, timezone import sentry_sdk @@ -71,9 +70,6 @@ class SpanKwargs(TypedDict, total=False): description: str """A description of what operation is being performed within the span.""" - hub: Optional["sentry_sdk.Hub"] - """The hub to use for this span. This argument is DEPRECATED. Please use the `scope` parameter, instead.""" - status: str """The span's status. Possible values are listed at https://develop.sentry.dev/sdk/event-payloads/span/""" @@ -226,10 +222,6 @@ class Span: :param op: The span's operation. A list of recommended values is available here: https://develop.sentry.dev/sdk/performance/span-operations/ :param description: A description of what operation is being performed within the span. - :param hub: The hub to use for this span. - - .. deprecated:: 2.0.0 - Please use the `scope` parameter, instead. :param status: The span's status. Possible values are listed at https://develop.sentry.dev/sdk/event-payloads/span/ :param containing_transaction: The transaction that this span belongs to. @@ -254,7 +246,6 @@ class Span: "_tags", "_data", "_span_recorder", - "hub", "_context_manager_state", "_containing_transaction", "_local_aggregator", @@ -271,7 +262,6 @@ def __init__( sampled=None, # type: Optional[bool] op=None, # type: Optional[str] description=None, # type: Optional[str] - hub=None, # type: Optional[sentry_sdk.Hub] # deprecated status=None, # type: Optional[str] containing_transaction=None, # type: Optional[Transaction] start_timestamp=None, # type: Optional[Union[datetime, float]] @@ -287,7 +277,6 @@ def __init__( self.op = op self.description = description self.status = status - self.hub = hub # backwards compatibility self.scope = scope self.origin = origin self._measurements = {} # type: Dict[str, MeasurementValue] @@ -295,15 +284,6 @@ def __init__( self._data = {} # type: Dict[str, Any] self._containing_transaction = containing_transaction - if hub is not None: - warnings.warn( - "The `hub` parameter is deprecated. Please use `scope` instead.", - DeprecationWarning, - stacklevel=2, - ) - - self.scope = self.scope or hub.scope - if start_timestamp is None: start_timestamp = datetime.now(timezone.utc) elif isinstance(start_timestamp, float): @@ -831,45 +811,10 @@ def containing_transaction(self): # reference. 
return self - def _get_scope_from_finish_args( - self, - scope_arg, # type: Optional[Union[sentry_sdk.Scope, sentry_sdk.Hub]] - hub_arg, # type: Optional[Union[sentry_sdk.Scope, sentry_sdk.Hub]] - ): - # type: (...) -> Optional[sentry_sdk.Scope] - """ - Logic to get the scope from the arguments passed to finish. This - function exists for backwards compatibility with the old finish. - - TODO: Remove this function in the next major version. - """ - scope_or_hub = scope_arg - if hub_arg is not None: - warnings.warn( - "The `hub` parameter is deprecated. Please use the `scope` parameter, instead.", - DeprecationWarning, - stacklevel=3, - ) - - scope_or_hub = hub_arg - - if isinstance(scope_or_hub, sentry_sdk.Hub): - warnings.warn( - "Passing a Hub to finish is deprecated. Please pass a Scope, instead.", - DeprecationWarning, - stacklevel=3, - ) - - return scope_or_hub.scope - - return scope_or_hub - def finish( self, scope=None, # type: Optional[sentry_sdk.Scope] end_timestamp=None, # type: Optional[Union[float, datetime]] - *, - hub=None, # type: Optional[sentry_sdk.Hub] ): # type: (...) -> Optional[str] """Finishes the transaction and sends it to Sentry. @@ -879,9 +824,6 @@ def finish( If not provided, the current Scope will be used. :param end_timestamp: Optional timestamp that should be used as timestamp instead of the current time. - :param hub: The hub to use for this transaction. - This argument is DEPRECATED. Please use the `scope` - parameter, instead. :return: The event ID if the transaction was sent to Sentry, otherwise None. @@ -890,12 +832,6 @@ def finish( # This transaction is already finished, ignore. return None - # For backwards compatibility, we must handle the case where `scope` - # or `hub` could both either be a `Scope` or a `Hub`. - scope = self._get_scope_from_finish_args( - scope, hub - ) # type: Optional[sentry_sdk.Scope] - scope = scope or self.scope or sentry_sdk.get_current_scope() client = sentry_sdk.get_client() @@ -1215,13 +1151,8 @@ def finish( self, scope=None, # type: Optional[sentry_sdk.Scope] end_timestamp=None, # type: Optional[Union[float, datetime]] - *, - hub=None, # type: Optional[sentry_sdk.Hub] ): # type: (...) -> Optional[str] - """ - The `hub` parameter is deprecated. Please use the `scope` parameter, instead. 
- """ pass def set_measurement(self, name, value, unit=""): diff --git a/sentry_sdk/utils.py b/sentry_sdk/utils.py index d731fa2254..106b03cdce 100644 --- a/sentry_sdk/utils.py +++ b/sentry_sdk/utils.py @@ -52,7 +52,7 @@ Union, ) - from gevent.hub import Hub + from gevent.hub import Hub as GeventHub from sentry_sdk._types import Event, ExcInfo @@ -1814,9 +1814,9 @@ def now(): except ImportError: # it's not great that the signatures are different, get_hub can't return None - # consider adding an if TYPE_CHECKING to change the signature to Optional[Hub] + # consider adding an if TYPE_CHECKING to change the signature to Optional[GeventHub] def get_gevent_hub(): # type: ignore[misc] - # type: () -> Optional[Hub] + # type: () -> Optional[GeventHub] return None def is_module_patched(mod_name): diff --git a/tests/integrations/conftest.py b/tests/integrations/conftest.py index 560155e2b5..2dd25bab6f 100644 --- a/tests/integrations/conftest.py +++ b/tests/integrations/conftest.py @@ -6,19 +6,8 @@ def capture_exceptions(monkeypatch): def inner(): errors = set() - old_capture_event_hub = sentry_sdk.Hub.capture_event old_capture_event_scope = sentry_sdk.Scope.capture_event - def capture_event_hub(self, event, hint=None, scope=None): - """ - Can be removed when we remove push_scope and the Hub from the SDK. - """ - if hint: - if "exc_info" in hint: - error = hint["exc_info"][1] - errors.add(error) - return old_capture_event_hub(self, event, hint=hint, scope=scope) - def capture_event_scope(self, event, hint=None, scope=None): if hint: if "exc_info" in hint: @@ -26,7 +15,6 @@ def capture_event_scope(self, event, hint=None, scope=None): errors.add(error) return old_capture_event_scope(self, event, hint=hint, scope=scope) - monkeypatch.setattr(sentry_sdk.Hub, "capture_event", capture_event_hub) monkeypatch.setattr(sentry_sdk.Scope, "capture_event", capture_event_scope) return errors diff --git a/tests/new_scopes_compat/__init__.py b/tests/new_scopes_compat/__init__.py deleted file mode 100644 index 45391bd9ad..0000000000 --- a/tests/new_scopes_compat/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -""" -Separate module for tests that check backwards compatibility of the Hub API with 1.x. -These tests should be removed once we remove the Hub API, likely in the next major. - -All tests in this module are run with hub isolation, provided by `isolate_hub` autouse -fixture, defined in `conftest.py`. -""" diff --git a/tests/new_scopes_compat/conftest.py b/tests/new_scopes_compat/conftest.py deleted file mode 100644 index 9f16898dea..0000000000 --- a/tests/new_scopes_compat/conftest.py +++ /dev/null @@ -1,8 +0,0 @@ -import pytest -import sentry_sdk - - -@pytest.fixture(autouse=True) -def isolate_hub(suppress_deprecation_warnings): - with sentry_sdk.Hub(None): - yield diff --git a/tests/new_scopes_compat/test_new_scopes_compat.py b/tests/new_scopes_compat/test_new_scopes_compat.py deleted file mode 100644 index 1e109ec036..0000000000 --- a/tests/new_scopes_compat/test_new_scopes_compat.py +++ /dev/null @@ -1,217 +0,0 @@ -import sentry_sdk -from sentry_sdk.hub import Hub - -""" -Those tests are meant to check the compatibility of the new scopes in SDK 2.0 with the old Hub/Scope system in SDK 1.x. - -Those tests have been run with the latest SDK 1.x versiona and the data used in the `assert` statements represents -the behvaior of the SDK 1.x. - -This makes sure that we are backwards compatible. 
(on a best effort basis, there will probably be some edge cases that are not covered here) -""" - - -def test_with_hub_sdk1(sentry_init, capture_events): - """ - Mutate data in a `with Hub:` block - - Checks the results of SDK 2.x against the results the same code returned in SDK 1.x. - """ - sentry_init() - - events = capture_events() - - sentry_sdk.set_tag("A", 1) - sentry_sdk.capture_message("Event A") - - with Hub.current as hub: # with hub - sentry_sdk.set_tag("B1", 1) - hub.scope.set_tag("B2", 1) - sentry_sdk.capture_message("Event B") - - sentry_sdk.set_tag("Z", 1) - sentry_sdk.capture_message("Event Z") - - (event_a, event_b, event_z) = events - - # Check against the results the same code returned in SDK 1.x - assert event_a["tags"] == {"A": 1} - assert event_b["tags"] == {"A": 1, "B1": 1, "B2": 1} - assert event_z["tags"] == {"A": 1, "B1": 1, "B2": 1, "Z": 1} - - -def test_with_hub_configure_scope_sdk1(sentry_init, capture_events): - """ - Mutate data in a `with Hub:` containing a `with configure_scope` block - - Checks the results of SDK 2.x against the results the same code returned in SDK 1.x. - """ - sentry_init() - - events = capture_events() - - sentry_sdk.set_tag("A", 1) - sentry_sdk.capture_message("Event A") - - with Hub.current as hub: # with hub - sentry_sdk.set_tag("B1", 1) - with hub.configure_scope() as scope: # configure scope - sentry_sdk.set_tag("B2", 1) - hub.scope.set_tag("B3", 1) - scope.set_tag("B4", 1) - sentry_sdk.capture_message("Event B") - sentry_sdk.set_tag("B5", 1) - sentry_sdk.capture_message("Event C") - - sentry_sdk.set_tag("Z", 1) - sentry_sdk.capture_message("Event Z") - - (event_a, event_b, event_c, event_z) = events - - # Check against the results the same code returned in SDK 1.x - assert event_a["tags"] == {"A": 1} - assert event_b["tags"] == {"A": 1, "B1": 1, "B2": 1, "B3": 1, "B4": 1} - assert event_c["tags"] == {"A": 1, "B1": 1, "B2": 1, "B3": 1, "B4": 1, "B5": 1} - assert event_z["tags"] == { - "A": 1, - "B1": 1, - "B2": 1, - "B3": 1, - "B4": 1, - "B5": 1, - "Z": 1, - } - - -def test_with_hub_push_scope_sdk1(sentry_init, capture_events): - """ - Mutate data in a `with Hub:` containing a `with push_scope` block - - Checks the results of SDK 2.x against the results the same code returned in SDK 1.x. - """ - sentry_init() - - events = capture_events() - - sentry_sdk.set_tag("A", 1) - sentry_sdk.capture_message("Event A") - - with Hub.current as hub: # with hub - sentry_sdk.set_tag("B1", 1) - with hub.push_scope() as scope: # push scope - sentry_sdk.set_tag("B2", 1) - hub.scope.set_tag("B3", 1) - scope.set_tag("B4", 1) - sentry_sdk.capture_message("Event B") - sentry_sdk.set_tag("B5", 1) - sentry_sdk.capture_message("Event C") - - sentry_sdk.set_tag("Z", 1) - sentry_sdk.capture_message("Event Z") - - (event_a, event_b, event_c, event_z) = events - - # Check against the results the same code returned in SDK 1.x - assert event_a["tags"] == {"A": 1} - assert event_b["tags"] == {"A": 1, "B1": 1, "B2": 1, "B3": 1, "B4": 1} - assert event_c["tags"] == {"A": 1, "B1": 1, "B5": 1} - assert event_z["tags"] == {"A": 1, "B1": 1, "B5": 1, "Z": 1} - - -def test_with_cloned_hub_sdk1(sentry_init, capture_events): - """ - Mutate data in a `with cloned Hub:` block - - Checks the results of SDK 2.x against the results the same code returned in SDK 1.x. 
- """ - sentry_init() - - events = capture_events() - - sentry_sdk.set_tag("A", 1) - sentry_sdk.capture_message("Event A") - - with Hub(Hub.current) as hub: # clone hub - sentry_sdk.set_tag("B1", 1) - hub.scope.set_tag("B2", 1) - sentry_sdk.capture_message("Event B") - - sentry_sdk.set_tag("Z", 1) - sentry_sdk.capture_message("Event Z") - - (event_a, event_b, event_z) = events - - # Check against the results the same code returned in SDK 1.x - assert event_a["tags"] == {"A": 1} - assert event_b["tags"] == {"A": 1, "B1": 1, "B2": 1} - assert event_z["tags"] == {"A": 1, "Z": 1} - - -def test_with_cloned_hub_configure_scope_sdk1(sentry_init, capture_events): - """ - Mutate data in a `with cloned Hub:` containing a `with configure_scope` block - - Checks the results of SDK 2.x against the results the same code returned in SDK 1.x. - """ - sentry_init() - - events = capture_events() - - sentry_sdk.set_tag("A", 1) - sentry_sdk.capture_message("Event A") - - with Hub(Hub.current) as hub: # clone hub - sentry_sdk.set_tag("B1", 1) - with hub.configure_scope() as scope: # configure scope - sentry_sdk.set_tag("B2", 1) - hub.scope.set_tag("B3", 1) - scope.set_tag("B4", 1) - sentry_sdk.capture_message("Event B") - sentry_sdk.set_tag("B5", 1) - sentry_sdk.capture_message("Event C") - - sentry_sdk.set_tag("Z", 1) - sentry_sdk.capture_message("Event Z") - - (event_a, event_b, event_c, event_z) = events - - # Check against the results the same code returned in SDK 1.x - assert event_a["tags"] == {"A": 1} - assert event_b["tags"] == {"A": 1, "B1": 1, "B2": 1, "B3": 1, "B4": 1} - assert event_c["tags"] == {"A": 1, "B1": 1, "B2": 1, "B3": 1, "B4": 1, "B5": 1} - assert event_z["tags"] == {"A": 1, "Z": 1} - - -def test_with_cloned_hub_push_scope_sdk1(sentry_init, capture_events): - """ - Mutate data in a `with cloned Hub:` containing a `with push_scope` block - - Checks the results of SDK 2.x against the results the same code returned in SDK 1.x. 
- """ - sentry_init() - - events = capture_events() - - sentry_sdk.set_tag("A", 1) - sentry_sdk.capture_message("Event A") - - with Hub(Hub.current) as hub: # clone hub - sentry_sdk.set_tag("B1", 1) - with hub.push_scope() as scope: # push scope - sentry_sdk.set_tag("B2", 1) - hub.scope.set_tag("B3", 1) - scope.set_tag("B4", 1) - sentry_sdk.capture_message("Event B") - sentry_sdk.set_tag("B5", 1) - sentry_sdk.capture_message("Event C") - - sentry_sdk.set_tag("Z", 1) - sentry_sdk.capture_message("Event Z") - - (event_a, event_b, event_c, event_z) = events - - # Check against the results the same code returned in SDK 1.x - assert event_a["tags"] == {"A": 1} - assert event_b["tags"] == {"A": 1, "B1": 1, "B2": 1, "B3": 1, "B4": 1} - assert event_c["tags"] == {"A": 1, "B1": 1, "B5": 1} - assert event_z["tags"] == {"A": 1, "Z": 1} diff --git a/tests/profiler/test_transaction_profiler.py b/tests/profiler/test_transaction_profiler.py index 142fd7d78c..a77942e788 100644 --- a/tests/profiler/test_transaction_profiler.py +++ b/tests/profiler/test_transaction_profiler.py @@ -1,6 +1,5 @@ import inspect import os -import sentry_sdk import sys import threading import time @@ -817,24 +816,6 @@ def test_profile_processing( assert processed["samples"] == expected["samples"] -def test_hub_backwards_compatibility(suppress_deprecation_warnings): - hub = sentry_sdk.Hub() - - with pytest.warns(DeprecationWarning): - profile = Profile(True, 0, hub=hub) - - with pytest.warns(DeprecationWarning): - assert profile.hub is hub - - new_hub = sentry_sdk.Hub() - - with pytest.warns(DeprecationWarning): - profile.hub = new_hub - - with pytest.warns(DeprecationWarning): - assert profile.hub is new_hub - - def test_no_warning_without_hub(): with warnings.catch_warnings(): warnings.simplefilter("error") diff --git a/tests/test_basics.py b/tests/test_basics.py index a98d00a774..9a24827d42 100644 --- a/tests/test_basics.py +++ b/tests/test_basics.py @@ -21,7 +21,6 @@ add_breadcrumb, isolation_scope, new_scope, - Hub, ) from sentry_sdk.integrations import ( _AUTO_ENABLING_INTEGRATIONS, @@ -800,21 +799,3 @@ def test_last_event_id_scope(sentry_init): # Should not crash with isolation_scope() as scope: assert scope.last_event_id() is None - - -def test_hub_constructor_deprecation_warning(): - with pytest.warns(sentry_sdk.hub.SentryHubDeprecationWarning): - Hub() - - -def test_hub_current_deprecation_warning(): - with pytest.warns(sentry_sdk.hub.SentryHubDeprecationWarning) as warning_records: - Hub.current - - # Make sure we only issue one deprecation warning - assert len(warning_records) == 1 - - -def test_hub_main_deprecation_warnings(): - with pytest.warns(sentry_sdk.hub.SentryHubDeprecationWarning): - Hub.main diff --git a/tests/test_sessions.py b/tests/test_sessions.py index 11f0314dda..bc9c598a83 100644 --- a/tests/test_sessions.py +++ b/tests/test_sessions.py @@ -1,7 +1,7 @@ from unittest import mock import sentry_sdk -from sentry_sdk.sessions import auto_session_tracking, track_session +from sentry_sdk.sessions import track_session def sorted_aggregates(item): @@ -83,52 +83,13 @@ def test_aggregates(sentry_init, capture_envelopes): assert aggregates[0]["errored"] == 1 -def test_aggregates_deprecated( - sentry_init, capture_envelopes, suppress_deprecation_warnings -): - sentry_init( - release="fun-release", - environment="not-fun-env", - ) - envelopes = capture_envelopes() - - with auto_session_tracking(session_mode="request"): - with sentry_sdk.new_scope() as scope: - try: - scope.set_user({"id": "42"}) - raise 
Exception("all is wrong") - except Exception: - sentry_sdk.capture_exception() - - with auto_session_tracking(session_mode="request"): - pass - - sentry_sdk.get_isolation_scope().start_session(session_mode="request") - sentry_sdk.get_isolation_scope().end_session() - sentry_sdk.flush() - - assert len(envelopes) == 2 - assert envelopes[0].get_event() is not None - - sess = envelopes[1] - assert len(sess.items) == 1 - sess_event = sess.items[0].payload.json - assert sess_event["attrs"] == { - "release": "fun-release", - "environment": "not-fun-env", - } - - aggregates = sorted_aggregates(sess_event) - assert len(aggregates) == 1 - assert aggregates[0]["exited"] == 2 - assert aggregates[0]["errored"] == 1 - - def test_aggregates_explicitly_disabled_session_tracking_request_mode( sentry_init, capture_envelopes ): sentry_init( - release="fun-release", environment="not-fun-env", auto_session_tracking=False + release="fun-release", + environment="not-fun-env", + auto_session_tracking=False, ) envelopes = capture_envelopes() @@ -157,38 +118,6 @@ def test_aggregates_explicitly_disabled_session_tracking_request_mode( assert "errored" not in aggregates[0] -def test_aggregates_explicitly_disabled_session_tracking_request_mode_deprecated( - sentry_init, capture_envelopes, suppress_deprecation_warnings -): - sentry_init( - release="fun-release", environment="not-fun-env", auto_session_tracking=False - ) - envelopes = capture_envelopes() - - with auto_session_tracking(session_mode="request"): - with sentry_sdk.new_scope(): - try: - raise Exception("all is wrong") - except Exception: - sentry_sdk.capture_exception() - - with auto_session_tracking(session_mode="request"): - pass - - sentry_sdk.get_isolation_scope().start_session(session_mode="request") - sentry_sdk.get_isolation_scope().end_session() - sentry_sdk.flush() - - sess = envelopes[1] - assert len(sess.items) == 1 - sess_event = sess.items[0].payload.json - - aggregates = sorted_aggregates(sess_event) - assert len(aggregates) == 1 - assert aggregates[0]["exited"] == 1 - assert "errored" not in aggregates[0] - - def test_no_thread_on_shutdown_no_errors(sentry_init): sentry_init( release="fun-release", @@ -214,31 +143,3 @@ def test_no_thread_on_shutdown_no_errors(sentry_init): sentry_sdk.get_isolation_scope().start_session(session_mode="request") sentry_sdk.get_isolation_scope().end_session() sentry_sdk.flush() - - -def test_no_thread_on_shutdown_no_errors_deprecated( - sentry_init, suppress_deprecation_warnings -): - sentry_init( - release="fun-release", - environment="not-fun-env", - ) - - # make it seem like the interpreter is shutting down - with mock.patch( - "threading.Thread.start", - side_effect=RuntimeError("can't create new thread at interpreter shutdown"), - ): - with auto_session_tracking(session_mode="request"): - with sentry_sdk.new_scope(): - try: - raise Exception("all is wrong") - except Exception: - sentry_sdk.capture_exception() - - with auto_session_tracking(session_mode="request"): - pass - - sentry_sdk.get_isolation_scope().start_session(session_mode="request") - sentry_sdk.get_isolation_scope().end_session() - sentry_sdk.flush() diff --git a/tests/tracing/test_decorator.py b/tests/tracing/test_decorator.py index 584268fbdd..c018d610a5 100644 --- a/tests/tracing/test_decorator.py +++ b/tests/tracing/test_decorator.py @@ -31,6 +31,7 @@ def test_trace_decorator(): assert result2 == "return_of_sync_function" +@pytest.mark.forked def test_trace_decorator_no_trx(): with patch_start_tracing_child(fake_transaction_is_none=True): with 
mock.patch.object(logger, "debug", mock.Mock()) as fake_debug: diff --git a/tests/tracing/test_deprecated.py b/tests/tracing/test_deprecated.py deleted file mode 100644 index fb58e43ebf..0000000000 --- a/tests/tracing/test_deprecated.py +++ /dev/null @@ -1,59 +0,0 @@ -import warnings - -import pytest - -import sentry_sdk -import sentry_sdk.tracing -from sentry_sdk import start_span - -from sentry_sdk.tracing import Span - - -@pytest.mark.skip(reason="This deprecated feature has been removed in SDK 2.0.") -def test_start_span_to_start_transaction(sentry_init, capture_events): - # XXX: this only exists for backwards compatibility with code before - # Transaction / start_transaction were introduced. - sentry_init(traces_sample_rate=1.0) - events = capture_events() - - with start_span(transaction="/1/"): - pass - - with start_span(Span(transaction="/2/")): - pass - - assert len(events) == 2 - assert events[0]["transaction"] == "/1/" - assert events[1]["transaction"] == "/2/" - - -@pytest.mark.parametrize( - "parameter_value_getter", - # Use lambda to avoid Hub deprecation warning here (will suppress it in the test) - (lambda: sentry_sdk.Hub(), lambda: sentry_sdk.Scope()), -) -def test_passing_hub_parameter_to_transaction_finish( - suppress_deprecation_warnings, parameter_value_getter -): - parameter_value = parameter_value_getter() - transaction = sentry_sdk.tracing.Transaction() - with pytest.warns(DeprecationWarning): - transaction.finish(hub=parameter_value) - - -def test_passing_hub_object_to_scope_transaction_finish(suppress_deprecation_warnings): - transaction = sentry_sdk.tracing.Transaction() - - # Do not move the following line under the `with` statement. Otherwise, the Hub.__init__ deprecation - # warning will be confused with the transaction.finish deprecation warning that we are testing. 
- hub = sentry_sdk.Hub() - - with pytest.warns(DeprecationWarning): - transaction.finish(hub) - - -def test_no_warnings_scope_to_transaction_finish(): - transaction = sentry_sdk.tracing.Transaction() - with warnings.catch_warnings(): - warnings.simplefilter("error") - transaction.finish(sentry_sdk.Scope()) From 2f540eb35a32c738945789fceaa790947a269758 Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Mon, 26 Aug 2024 14:07:49 +0200 Subject: [PATCH 014/244] feat(potel): Make tracing APIs use OTel in the background (#3242) * Skeletons for new components * Add simple scope management whenever a context is attached * create a new otel context `_SCOPES_KEY` that will hold a tuple of `(curent_scope, isolation_scope)` * the `current_scope` will always be forked (like on every span creation/context update in practice) * note that this is on `attach`, so not on all copy-on-write context object creation but only on apis such as [`trace.use_span`](https://github.com/open-telemetry/opentelemetry-python/blob/ba22b165471bde2037620f2c850ab648a849fbc0/opentelemetry-api/src/opentelemetry/trace/__init__.py#L547) or [`tracer.start_as_current_span`](https://github.com/open-telemetry/opentelemetry-python/blob/ba22b165471bde2037620f2c850ab648a849fbc0/opentelemetry-api/src/opentelemetry/trace/__init__.py#L329) * basically every otel `context` fork corresponds to our `current_scope` fork * the `isolation_scope` currently will not be forked * these will later be updated, for instance when we update our top level scope apis that fork isolation scope, that will also have a corresponding change in this `attach` function * Don't parse DSN twice * wip * Skeletons for new components * Skeletons for new components * Add simple scope management whenever a context is attached * create a new otel context `_SCOPES_KEY` that will hold a tuple of `(curent_scope, isolation_scope)` * the `current_scope` will always be forked (like on every span creation/context update in practice) * note that this is on `attach`, so not on all copy-on-write context object creation but only on apis such as [`trace.use_span`](https://github.com/open-telemetry/opentelemetry-python/blob/ba22b165471bde2037620f2c850ab648a849fbc0/opentelemetry-api/src/opentelemetry/trace/__init__.py#L547) or [`tracer.start_as_current_span`](https://github.com/open-telemetry/opentelemetry-python/blob/ba22b165471bde2037620f2c850ab648a849fbc0/opentelemetry-api/src/opentelemetry/trace/__init__.py#L329) * basically every otel `context` fork corresponds to our `current_scope` fork * the `isolation_scope` currently will not be forked * these will later be updated, for instance when we update our top level scope apis that fork isolation scope, that will also have a corresponding change in this `attach` function * mypy fixes * working span processor * lint * Port over op/description/status extraction * defaultdict * naive impl * wip * fix args * wip * remove extra docs * Add simple scope management whenever a context is attached (#3159) Add simple scope management whenever a context is attached * create a new otel context `_SCOPES_KEY` that will hold a tuple of `(curent_scope, isolation_scope)` * the `current_scope` will always be forked (like on every span creation/context update in practice) * note that this is on `attach`, so not on all copy-on-write context object creation but only on apis such as 
[`trace.use_span`](https://github.com/open-telemetry/opentelemetry-python/blob/ba22b165471bde2037620f2c850ab648a849fbc0/opentelemetry-api/src/opentelemetry/trace/__init__.py#L547) or [`tracer.start_as_current_span`](https://github.com/open-telemetry/opentelemetry-python/blob/ba22b165471bde2037620f2c850ab648a849fbc0/opentelemetry-api/src/opentelemetry/trace/__init__.py#L329) * basically every otel `context` fork corresponds to our `current_scope` fork * the `isolation_scope` currently will not be forked * these will later be updated, for instance when we update our top level scope apis that fork isolation scope, that will also have a corresponding change in this `attach` function * Implement new POTel span processor (#3223) * only acts on `on_end` instead of both `on_start/on_end` as before * store children spans in a dict mapping `span_id -> children` * new dict only stores otel span objects and no sentry transaction/span objects so we save a bit of useless memory allocation * I'm not using our current `Transaction/Span` classes at all to build the event because when we add our APIs later, we'll need to rip these out and we also avoid having to deal with the `instrumenter` problem * if we get a root span (without parent), we recursively walk the dict and find the children and package up the transaction event and send it * I didn't do it like JS because I think this way is better * they [group an array of `finished_spans`](https://github.com/getsentry/sentry-javascript/blob/7e298036a21a5658f3eb9ba184165178c48d7ef8/packages/opentelemetry/src/spanExporter.ts#L132) every time a root span ends and I think this uses more cpu than what I did * and the dict like I used it doesn't take more space than the array either * if we get a span with a parent we just update the dict to find the span later * moved the common `is_sentry_span` logic to utils * Basic test cases for potel (#3286) * Proxy POTelSpan.set_data to underlying otel span attributes (#3297) * ref(tracing): Simplify backwards-compat code (#3379) With this change, we aim to simplify the backwards-compatibility code for POTel tracing. We do this as follows: - Remove `start_*` functions from `tracing` - Remove unused parameters from `tracing.POTelSpan.__init__`. - Make all parameters to `tracing.POTelSpan.__init__` kwarg-only. - Allow `tracing.POTelSpan.__init__` to accept arbitrary kwargs, which are all ignored, for compatibility with old `Span` interface. - Completely remove `start_inactive_span`, since inactive spans can be created by setting `active=False` when constructing a `POTelSpan`. 
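A rough usage sketch of the keyword-only span API the bullets above describe (Python; it assumes the `start_span` signature and `POTelSpan` constructor that are added later in this same patch, and omits a real DSN):

    import sentry_sdk
    from sentry_sdk.tracing import POTelSpan

    sentry_sdk.init(traces_sample_rate=1.0)

    # start_span now accepts keyword arguments only and returns a POTelSpan
    # wrapping an OTel span; used as a context manager it becomes the current span.
    with sentry_sdk.start_span(op="http", description="request") as span:
        span.set_data("foo", "bar")        # stored as a plain OTel attribute
        span.set_tag("component", "api")   # stored under the sentry.tag.* namespace
        with sentry_sdk.start_span(op="db", description="SELECT 1"):
            pass

    # Per the bullet above, an "inactive" span (one not attached as the current
    # span) is created by passing active=False to the constructor instead of
    # calling the removed start_inactive_span helper.
    background = POTelSpan(op="task", description="background work", active=False)
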
* New Scope implementation based on OTel Context (#3389) * New `PotelScope` inherits from scope and reads the scope from the otel context key `SENTRY_SCOPES_KEY` * New `isolation_scope` and `new_scope` context managers just use the context manager forking and yield with the scopes living on the above context key * isolation scope forking is done with the `SENTRY_FORK_ISOLATION_SCOPE_KEY` boolean context key * Fix circular imports (#3431) * Random tweaks (#3437) * Origin improvements (#3432) * Tweak OTel timestamp utils (#3436) * Create spans on scope (#3442) * Fill out more property/method stubs (#3441) * Cleanup origin handling and defaults (#3445) * add note to migration guide * Attribute namespace for tags, measurements (#3448) --------- Co-authored-by: Neel Shah Co-authored-by: Neel Shah Co-authored-by: Daniel Szoke <7881302+szokeasaurusrex@users.noreply.github.com> --- MIGRATION_GUIDE.md | 6 +- sentry_sdk/api.py | 49 ++- sentry_sdk/integrations/asgi.py | 4 +- sentry_sdk/integrations/boto3.py | 3 +- .../integrations/opentelemetry/consts.py | 16 + .../opentelemetry/contextvars_context.py | 30 +- .../integrations/opentelemetry/integration.py | 14 +- .../opentelemetry/potel_span_processor.py | 83 ++-- .../integrations/opentelemetry/scope.py | 84 ++++ .../opentelemetry/span_processor.py | 7 +- .../integrations/opentelemetry/utils.py | 71 +++- sentry_sdk/integrations/wsgi.py | 4 +- sentry_sdk/scope.py | 101 ++--- sentry_sdk/tracing.py | 379 +++++++++++++++++- sentry_sdk/tracing_utils.py | 7 +- setup.py | 1 + tests/conftest.py | 4 + .../integrations/opentelemetry/test_potel.py | 316 +++++++++++++++ .../integrations/opentelemetry/test_utils.py | 23 +- tests/test_api.py | 4 +- tests/tracing/test_misc.py | 6 +- 21 files changed, 1052 insertions(+), 160 deletions(-) create mode 100644 sentry_sdk/integrations/opentelemetry/scope.py create mode 100644 tests/integrations/opentelemetry/test_potel.py diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index c2e071a078..7a71c3e872 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -9,6 +9,8 @@ Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of wh ### Changed +- The SDK now supports Python 3.7 and higher. +- `sentry_sdk.start_span` now only takes keyword arguments. - The `Span()` constructor does not accept a `hub` parameter anymore. - `Span.finish()` does not accept a `hub` parameter anymore. - The `Profile()` constructor does not accept a `hub` parameter anymore. @@ -16,15 +18,17 @@ Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of wh ### Removed +- When setting span status, the HTTP status code is no longer automatically added as a tag. - Class `Hub` has been removed. - Class `_ScopeManager` has been removed. - The context manager `auto_session_tracking()` has been removed. Use `track_session()` instead. - The context manager `auto_session_tracking_scope()` has been removed. Use `track_session()` instead. -- Utility function `is_auto_session_tracking_enabled()` has been removed. There is no public replacement. There is a private `_is_auto_session_tracking_enabled()` (if you absolutely need this function) It accepts a `scope` parameter instead of the previously used `hub` parameter. +- Utility function `is_auto_session_tracking_enabled()` has been removed. There is no public replacement. There is a private `_is_auto_session_tracking_enabled()` (if you absolutely need this function) It accepts a `scope` parameter instead of the previously used `hub` parameter. 
- Utility function `is_auto_session_tracking_enabled_scope()` has been removed. There is no public replacement. There is a private `_is_auto_session_tracking_enabled()` (if you absolutely need this function) ### Deprecated +- `sentry_sdk.start_transaction` is deprecated. Use `sentry_sdk.start_span` instead. ## Upgrading to 2.0 diff --git a/sentry_sdk/api.py b/sentry_sdk/api.py index 49a3bf7ecf..0b88ea3274 100644 --- a/sentry_sdk/api.py +++ b/sentry_sdk/api.py @@ -2,10 +2,15 @@ from sentry_sdk import tracing_utils, Client from sentry_sdk._init_implementation import init -from sentry_sdk.scope import Scope, new_scope, isolation_scope -from sentry_sdk.tracing import NoOpSpan, Transaction, trace +from sentry_sdk.tracing import POTelSpan, Transaction, trace from sentry_sdk.crons import monitor +# TODO-neel-potel make 2 scope strategies/impls and switch +from sentry_sdk.integrations.opentelemetry.scope import ( + PotelScope as Scope, + new_scope, + isolation_scope, +) from sentry_sdk._types import TYPE_CHECKING @@ -227,22 +232,40 @@ def flush( return get_client().flush(timeout=timeout, callback=callback) -@scopemethod def start_span( + *, + span=None, + custom_sampling_context=None, **kwargs, # type: Any ): - # type: (...) -> Span - return get_current_scope().start_span(**kwargs) + # type: (...) -> POTelSpan + """ + Start and return a span. + + This is the entry point to manual tracing instrumentation. + + A tree structure can be built by adding child spans to the span. + To start a new child span within the span, call the `start_child()` method. + + When used as a context manager, spans are automatically finished at the end + of the `with` block. If not using context managers, call the `finish()` + method. + """ + # TODO: Consider adding type hints to the method signature. + return get_current_scope().start_span(span, custom_sampling_context, **kwargs) -@scopemethod def start_transaction( transaction=None, # type: Optional[Transaction] custom_sampling_context=None, # type: Optional[SamplingContext] **kwargs, # type: Unpack[TransactionKwargs] ): - # type: (...) -> Union[Transaction, NoOpSpan] + # type: (...) -> POTelSpan """ + .. deprecated:: 3.0.0 + This function is deprecated and will be removed in a future release. + Use :py:meth:`sentry_sdk.start_span` instead. + Start and return a transaction on the current scope. Start an existing transaction if given, otherwise create and start a new @@ -271,8 +294,10 @@ def start_transaction( constructor. See :py:class:`sentry_sdk.tracing.Transaction` for available arguments. """ - return get_current_scope().start_transaction( - transaction, custom_sampling_context, **kwargs + return start_span( + span=transaction, + custom_sampling_context=custom_sampling_context, + **kwargs, ) @@ -311,10 +336,8 @@ def get_baggage(): return None -def continue_trace( - environ_or_headers, op=None, name=None, source=None, origin="manual" -): - # type: (Dict[str, Any], Optional[str], Optional[str], Optional[str], str) -> Transaction +def continue_trace(environ_or_headers, op=None, name=None, source=None, origin=None): + # type: (Dict[str, Any], Optional[str], Optional[str], Optional[str], Optional[str]) -> Transaction """ Sets the propagation context from environment or headers and returns a transaction. 
""" diff --git a/sentry_sdk/integrations/asgi.py b/sentry_sdk/integrations/asgi.py index b952da021d..426f7c4902 100644 --- a/sentry_sdk/integrations/asgi.py +++ b/sentry_sdk/integrations/asgi.py @@ -96,9 +96,9 @@ def __init__( unsafe_context_data=False, transaction_style="endpoint", mechanism_type="asgi", - span_origin="manual", + span_origin=None, ): - # type: (Any, bool, str, str, str) -> None + # type: (Any, bool, str, str, Optional[str]) -> None """ Instrument an ASGI application with Sentry. Provides HTTP/websocket data to sent events and basic handling for exceptions bubbling up diff --git a/sentry_sdk/integrations/boto3.py b/sentry_sdk/integrations/boto3.py index 0fb997767b..3c5131e9d0 100644 --- a/sentry_sdk/integrations/boto3.py +++ b/sentry_sdk/integrations/boto3.py @@ -3,7 +3,6 @@ import sentry_sdk from sentry_sdk.consts import OP, SPANDATA from sentry_sdk.integrations import Integration, DidNotEnable -from sentry_sdk.tracing import Span from sentry_sdk._types import TYPE_CHECKING from sentry_sdk.utils import ( @@ -19,6 +18,8 @@ from typing import Optional from typing import Type + from sentry_sdk.tracing import Span + try: from botocore import __version__ as BOTOCORE_VERSION # type: ignore from botocore.client import BaseClient # type: ignore diff --git a/sentry_sdk/integrations/opentelemetry/consts.py b/sentry_sdk/integrations/opentelemetry/consts.py index 69a770ad53..aca364fd54 100644 --- a/sentry_sdk/integrations/opentelemetry/consts.py +++ b/sentry_sdk/integrations/opentelemetry/consts.py @@ -1,7 +1,23 @@ from opentelemetry.context import create_key +# propagation keys SENTRY_TRACE_KEY = create_key("sentry-trace") SENTRY_BAGGAGE_KEY = create_key("sentry-baggage") + +# scope management keys +SENTRY_SCOPES_KEY = create_key("sentry_scopes") +SENTRY_FORK_ISOLATION_SCOPE_KEY = create_key("sentry_fork_isolation_scope") + OTEL_SENTRY_CONTEXT = "otel" SPAN_ORIGIN = "auto.otel" + + +class SentrySpanAttribute: + # XXX not all of these need separate attributes, we might just use + # existing otel attrs for some + DESCRIPTION = "sentry.description" + OP = "sentry.op" + ORIGIN = "sentry.origin" + MEASUREMENT = "sentry.measurement" + TAG = "sentry.tag" diff --git a/sentry_sdk/integrations/opentelemetry/contextvars_context.py b/sentry_sdk/integrations/opentelemetry/contextvars_context.py index 5e5eb9ba30..86fc253af8 100644 --- a/sentry_sdk/integrations/opentelemetry/contextvars_context.py +++ b/sentry_sdk/integrations/opentelemetry/contextvars_context.py @@ -1,26 +1,32 @@ -from opentelemetry.context import Context, create_key, get_value, set_value +from opentelemetry.context import Context, get_value, set_value from opentelemetry.context.contextvars_context import ContextVarsRuntimeContext -from sentry_sdk.scope import Scope - - -_SCOPES_KEY = create_key("sentry_scopes") +import sentry_sdk +from sentry_sdk.integrations.opentelemetry.consts import ( + SENTRY_SCOPES_KEY, + SENTRY_FORK_ISOLATION_SCOPE_KEY, +) class SentryContextVarsRuntimeContext(ContextVarsRuntimeContext): def attach(self, context): # type: (Context) -> object - scopes = get_value(_SCOPES_KEY, context) + scopes = get_value(SENTRY_SCOPES_KEY, context) + should_fork_isolation_scope = context.pop( + SENTRY_FORK_ISOLATION_SCOPE_KEY, False + ) if scopes and isinstance(scopes, tuple): (current_scope, isolation_scope) = scopes else: - current_scope = Scope.get_current_scope() - isolation_scope = Scope.get_isolation_scope() + current_scope = sentry_sdk.get_current_scope() + isolation_scope = sentry_sdk.get_isolation_scope() - # 
TODO-neel-potel fork isolation_scope too like JS - # once we setup our own apis to pass through to otel - new_scopes = (current_scope.fork(), isolation_scope) - new_context = set_value(_SCOPES_KEY, new_scopes, context) + new_scope = current_scope.fork() + new_isolation_scope = ( + isolation_scope.fork() if should_fork_isolation_scope else isolation_scope + ) + new_scopes = (new_scope, new_isolation_scope) + new_context = set_value(SENTRY_SCOPES_KEY, new_scopes, context) return super().attach(new_context) diff --git a/sentry_sdk/integrations/opentelemetry/integration.py b/sentry_sdk/integrations/opentelemetry/integration.py index 43e0396c16..4cd969f0e0 100644 --- a/sentry_sdk/integrations/opentelemetry/integration.py +++ b/sentry_sdk/integrations/opentelemetry/integration.py @@ -6,7 +6,12 @@ from sentry_sdk.integrations import DidNotEnable, Integration from sentry_sdk.integrations.opentelemetry.propagator import SentryPropagator -from sentry_sdk.integrations.opentelemetry.span_processor import SentrySpanProcessor +from sentry_sdk.integrations.opentelemetry.potel_span_processor import ( + PotelSentrySpanProcessor, +) +from sentry_sdk.integrations.opentelemetry.contextvars_context import ( + SentryContextVarsRuntimeContext, +) from sentry_sdk.utils import logger try: @@ -46,9 +51,14 @@ def setup_once(): def _setup_sentry_tracing(): # type: () -> None + import opentelemetry.context + + opentelemetry.context._RUNTIME_CONTEXT = SentryContextVarsRuntimeContext() + provider = TracerProvider() - provider.add_span_processor(SentrySpanProcessor()) + provider.add_span_processor(PotelSentrySpanProcessor()) trace.set_tracer_provider(provider) + set_global_textmap(SentryPropagator()) diff --git a/sentry_sdk/integrations/opentelemetry/potel_span_processor.py b/sentry_sdk/integrations/opentelemetry/potel_span_processor.py index 9604676dce..8b2a2f4c36 100644 --- a/sentry_sdk/integrations/opentelemetry/potel_span_processor.py +++ b/sentry_sdk/integrations/opentelemetry/potel_span_processor.py @@ -5,14 +5,16 @@ from opentelemetry.sdk.trace import Span, ReadableSpan, SpanProcessor from sentry_sdk import capture_event +from sentry_sdk.tracing import DEFAULT_SPAN_ORIGIN from sentry_sdk.integrations.opentelemetry.utils import ( is_sentry_span, - convert_otel_timestamp, + convert_from_otel_timestamp, + extract_span_attributes, extract_span_data, ) from sentry_sdk.integrations.opentelemetry.consts import ( OTEL_SENTRY_CONTEXT, - SPAN_ORIGIN, + SentrySpanAttribute, ) from sentry_sdk._types import TYPE_CHECKING @@ -107,21 +109,21 @@ def _root_span_to_transaction_event(self, span): # type: (ReadableSpan) -> Optional[Event] if not span.context: return None - if not span.start_time: - return None - if not span.end_time: + + event = self._common_span_transaction_attributes_as_json(span) + if event is None: return None trace_id = format_trace_id(span.context.trace_id) span_id = format_span_id(span.context.span_id) parent_span_id = format_span_id(span.parent.span_id) if span.parent else None - (op, description, status, _) = extract_span_data(span) + (op, description, status, _, origin) = extract_span_data(span) trace_context = { "trace_id": trace_id, "span_id": span_id, - "origin": SPAN_ORIGIN, + "origin": origin or DEFAULT_SPAN_ORIGIN, "op": op, "status": status, } # type: dict[str, Any] @@ -135,15 +137,15 @@ def _root_span_to_transaction_event(self, span): if span.resource.attributes: contexts[OTEL_SENTRY_CONTEXT] = {"resource": dict(span.resource.attributes)} - event = { - "type": "transaction", - "transaction": 
description, - # TODO-neel-potel tx source based on integration - "transaction_info": {"source": "custom"}, - "contexts": contexts, - "start_timestamp": convert_otel_timestamp(span.start_time), - "timestamp": convert_otel_timestamp(span.end_time), - } # type: Event + event.update( + { + "type": "transaction", + "transaction": description, + # TODO-neel-potel tx source based on integration + "transaction_info": {"source": "custom"}, + "contexts": contexts, + } + ) # type: Event return event @@ -151,31 +153,52 @@ def _span_to_json(self, span): # type: (ReadableSpan) -> Optional[dict[str, Any]] if not span.context: return None - if not span.start_time: - return None - if not span.end_time: + + span_json = self._common_span_transaction_attributes_as_json(span) + if span_json is None: return None trace_id = format_trace_id(span.context.trace_id) span_id = format_span_id(span.context.span_id) parent_span_id = format_span_id(span.parent.span_id) if span.parent else None - (op, description, status, _) = extract_span_data(span) + (op, description, status, _, origin) = extract_span_data(span) - span_json = { - "trace_id": trace_id, - "span_id": span_id, - "origin": SPAN_ORIGIN, - "op": op, - "description": description, - "status": status, - "start_timestamp": convert_otel_timestamp(span.start_time), - "timestamp": convert_otel_timestamp(span.end_time), - } # type: dict[str, Any] + span_json.update( + { + "trace_id": trace_id, + "span_id": span_id, + "op": op, + "description": description, + "status": status, + "origin": origin or DEFAULT_SPAN_ORIGIN, + } + ) if parent_span_id: span_json["parent_span_id"] = parent_span_id + if span.attributes: span_json["data"] = dict(span.attributes) return span_json + + def _common_span_transaction_attributes_as_json(self, span): + # type: (ReadableSpan) -> Optional[dict[str, Any]] + if not span.start_time or not span.end_time: + return None + + common_json = { + "start_timestamp": convert_from_otel_timestamp(span.start_time), + "timestamp": convert_from_otel_timestamp(span.end_time), + } # type: dict[str, Any] + + measurements = extract_span_attributes(span, SentrySpanAttribute.MEASUREMENT) + if measurements: + common_json["measurements"] = measurements + + tags = extract_span_attributes(span, SentrySpanAttribute.TAG) + if tags: + common_json["tags"] = tags + + return common_json diff --git a/sentry_sdk/integrations/opentelemetry/scope.py b/sentry_sdk/integrations/opentelemetry/scope.py new file mode 100644 index 0000000000..6d6f8f6acf --- /dev/null +++ b/sentry_sdk/integrations/opentelemetry/scope.py @@ -0,0 +1,84 @@ +from typing import cast +from contextlib import contextmanager + +from opentelemetry.context import get_value, set_value, attach, detach, get_current + +from sentry_sdk.scope import Scope, ScopeType +from sentry_sdk.integrations.opentelemetry.consts import ( + SENTRY_SCOPES_KEY, + SENTRY_FORK_ISOLATION_SCOPE_KEY, +) + +from sentry_sdk._types import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Tuple, Optional, Generator + + +class PotelScope(Scope): + @classmethod + def _get_scopes(cls): + # type: () -> Optional[Tuple[Scope, Scope]] + """ + Returns the current scopes tuple on the otel context. Internal use only. + """ + return cast("Optional[Tuple[Scope, Scope]]", get_value(SENTRY_SCOPES_KEY)) + + @classmethod + def get_current_scope(cls): + # type: () -> Scope + """ + Returns the current scope. 
+ """ + return cls._get_current_scope() or _INITIAL_CURRENT_SCOPE + + @classmethod + def _get_current_scope(cls): + # type: () -> Optional[Scope] + """ + Returns the current scope without creating a new one. Internal use only. + """ + scopes = cls._get_scopes() + return scopes[0] if scopes else None + + @classmethod + def get_isolation_scope(cls): + """ + Returns the isolation scope. + """ + # type: () -> Scope + return cls._get_isolation_scope() or _INITIAL_ISOLATION_SCOPE + + @classmethod + def _get_isolation_scope(cls): + # type: () -> Optional[Scope] + """ + Returns the isolation scope without creating a new one. Internal use only. + """ + scopes = cls._get_scopes() + return scopes[1] if scopes else None + + +_INITIAL_CURRENT_SCOPE = PotelScope(ty=ScopeType.CURRENT) +_INITIAL_ISOLATION_SCOPE = PotelScope(ty=ScopeType.ISOLATION) + + +@contextmanager +def isolation_scope(): + # type: () -> Generator[Scope, None, None] + context = set_value(SENTRY_FORK_ISOLATION_SCOPE_KEY, True) + token = attach(context) + try: + yield PotelScope.get_isolation_scope() + finally: + detach(token) + + +@contextmanager +def new_scope(): + # type: () -> Generator[Scope, None, None] + token = attach(get_current()) + try: + yield PotelScope.get_current_scope() + finally: + detach(token) diff --git a/sentry_sdk/integrations/opentelemetry/span_processor.py b/sentry_sdk/integrations/opentelemetry/span_processor.py index 594ccbb71f..2140b0e70b 100644 --- a/sentry_sdk/integrations/opentelemetry/span_processor.py +++ b/sentry_sdk/integrations/opentelemetry/span_processor.py @@ -13,7 +13,6 @@ INVALID_SPAN_ID, INVALID_TRACE_ID, ) -from sentry_sdk import get_client, start_transaction from sentry_sdk.integrations.opentelemetry.consts import ( SENTRY_BAGGAGE_KEY, SENTRY_TRACE_KEY, @@ -106,6 +105,8 @@ def _prune_old_spans(self): def on_start(self, otel_span, parent_context=None): # type: (OTelSpan, Optional[context_api.Context]) -> None + from sentry_sdk import get_client, start_transaction + client = get_client() if not client.dsn: @@ -258,7 +259,7 @@ def _update_span_with_otel_data(self, sentry_span, otel_span): for key, val in otel_span.attributes.items(): sentry_span.set_data(key, val) - (op, description, status, http_status) = extract_span_data(otel_span) + (op, description, status, http_status, _) = extract_span_data(otel_span) sentry_span.op = op sentry_span.description = description @@ -269,7 +270,7 @@ def _update_span_with_otel_data(self, sentry_span, otel_span): def _update_transaction_with_otel_data(self, sentry_span, otel_span): # type: (SentrySpan, OTelSpan) -> None - (op, _, status, http_status) = extract_span_data(otel_span) + (op, _, status, http_status, _) = extract_span_data(otel_span) sentry_span.op = op if http_status: diff --git a/sentry_sdk/integrations/opentelemetry/utils.py b/sentry_sdk/integrations/opentelemetry/utils.py index cb04dd8e1a..afa42ea772 100644 --- a/sentry_sdk/integrations/opentelemetry/utils.py +++ b/sentry_sdk/integrations/opentelemetry/utils.py @@ -6,15 +6,15 @@ from opentelemetry.sdk.trace import ReadableSpan from sentry_sdk.consts import SPANSTATUS from sentry_sdk.tracing import get_span_status_from_http_code +from sentry_sdk.integrations.opentelemetry.consts import SentrySpanAttribute from urllib3.util import parse_url as urlparse -from sentry_sdk import get_client from sentry_sdk.utils import Dsn from sentry_sdk._types import TYPE_CHECKING if TYPE_CHECKING: - from typing import Optional, Mapping, Sequence + from typing import Any, Optional, Mapping, Sequence, Union GRPC_ERROR_MAP 
= { @@ -43,6 +43,8 @@ def is_sentry_span(span): Break infinite loop: HTTP requests to Sentry are caught by OTel and send again to Sentry. """ + from sentry_sdk import get_client + if not span.attributes: return False @@ -70,19 +72,32 @@ def is_sentry_span(span): return False -def convert_otel_timestamp(time): +def convert_from_otel_timestamp(time): # type: (int) -> datetime + """Convert an OTel nanosecond-level timestamp to a datetime.""" return datetime.fromtimestamp(time / 1e9, timezone.utc) +def convert_to_otel_timestamp(time): + # type: (Union[datetime.datetime, float]) -> int + """Convert a datetime to an OTel timestamp (with nanosecond precision).""" + if isinstance(time, datetime): + return int(time.timestamp() * 1e9) + return int(time * 1e9) + + def extract_span_data(span): - # type: (ReadableSpan) -> tuple[str, str, Optional[str], Optional[int]] + # type: (ReadableSpan) -> tuple[str, str, Optional[str], Optional[int], Optional[str]] op = span.name description = span.name status, http_status = extract_span_status(span) + origin = None if span.attributes is None: - return (op, description, status, http_status) + return (op, description, status, http_status, origin) + + origin = span.attributes.get(SentrySpanAttribute.ORIGIN) + description = span.attributes.get(SentrySpanAttribute.DESCRIPTION) or description http_method = span.attributes.get(SpanAttributes.HTTP_METHOD) http_method = cast("Optional[str]", http_method) @@ -95,26 +110,21 @@ def extract_span_data(span): rpc_service = span.attributes.get(SpanAttributes.RPC_SERVICE) if rpc_service: - return ("rpc", description, status, http_status) + return ("rpc", description, status, http_status, origin) messaging_system = span.attributes.get(SpanAttributes.MESSAGING_SYSTEM) if messaging_system: - return ("message", description, status, http_status) + return ("message", description, status, http_status, origin) faas_trigger = span.attributes.get(SpanAttributes.FAAS_TRIGGER) if faas_trigger: - return ( - str(faas_trigger), - description, - status, - http_status, - ) + return (str(faas_trigger), description, status, http_status, origin) - return (op, description, status, http_status) + return (op, description, status, http_status, origin) def span_data_for_http_method(span): - # type: (ReadableSpan) -> tuple[str, str, Optional[str], Optional[int]] + # type: (ReadableSpan) -> tuple[str, str, Optional[str], Optional[int], Optional[str]] span_attributes = span.attributes or {} op = "http" @@ -150,11 +160,13 @@ def span_data_for_http_method(span): status, http_status = extract_span_status(span) - return (op, description, status, http_status) + origin = span_attributes.get(SentrySpanAttribute.ORIGIN) + + return (op, description, status, http_status, origin) def span_data_for_db_query(span): - # type: (ReadableSpan) -> tuple[str, str, Optional[str], Optional[int]] + # type: (ReadableSpan) -> tuple[str, str, Optional[str], Optional[int], Optional[str]] span_attributes = span.attributes or {} op = "db" @@ -163,8 +175,9 @@ def span_data_for_db_query(span): statement = cast("Optional[str]", statement) description = statement or span.name + origin = span_attributes.get(SentrySpanAttribute.ORIGIN) - return (op, description, None, None) + return (op, description, None, None, origin) def extract_span_status(span): @@ -225,3 +238,25 @@ def get_http_status_code(span_attributes): http_status = cast("Optional[int]", http_status) return http_status + + +def extract_span_attributes(span, namespace): + # type: (ReadableSpan, str) -> dict[str, Any] + """ + 
Extract Sentry-specific span attributes and make them look the way Sentry expects. + """ + extracted_attrs = {} + + for attr, value in (span.attributes or {}).items(): + if attr.startswith(namespace): + key = attr[len(namespace) + 1 :] + + if namespace == SentrySpanAttribute.MEASUREMENT: + value = { + "value": float(value[0]), + "unit": value[1], + } + + extracted_attrs[key] = value + + return extracted_attrs diff --git a/sentry_sdk/integrations/wsgi.py b/sentry_sdk/integrations/wsgi.py index 7a95611d78..9ea83a629c 100644 --- a/sentry_sdk/integrations/wsgi.py +++ b/sentry_sdk/integrations/wsgi.py @@ -67,8 +67,8 @@ def get_request_url(environ, use_x_forwarded_for=False): class SentryWsgiMiddleware: __slots__ = ("app", "use_x_forwarded_for", "span_origin") - def __init__(self, app, use_x_forwarded_for=False, span_origin="manual"): - # type: (Callable[[Dict[str, str], Callable[..., Any]], Any], bool, str) -> None + def __init__(self, app, use_x_forwarded_for=False, span_origin=None): + # type: (Callable[[Dict[str, str], Callable[..., Any]], Any], bool, Optional[str]) -> None self.app = app self.use_x_forwarded_for = use_x_forwarded_for self.span_origin = span_origin diff --git a/sentry_sdk/scope.py b/sentry_sdk/scope.py index 111c28dc7f..acf7d2b83e 100644 --- a/sentry_sdk/scope.py +++ b/sentry_sdk/scope.py @@ -25,6 +25,7 @@ NoOpSpan, Span, Transaction, + POTelSpan, ) from sentry_sdk._types import TYPE_CHECKING from sentry_sdk.utils import ( @@ -234,13 +235,21 @@ def get_current_scope(cls): Returns the current scope. """ - current_scope = _current_scope.get() + current_scope = cls._get_current_scope() if current_scope is None: current_scope = Scope(ty=ScopeType.CURRENT) _current_scope.set(current_scope) return current_scope + @classmethod + def _get_current_scope(cls): + # type: () -> Optional[Scope] + """ + Returns the current scope without creating a new one. Internal use only. + """ + return _current_scope.get() + @classmethod def set_current_scope(cls, new_current_scope): # type: (Scope) -> None @@ -260,13 +269,21 @@ def get_isolation_scope(cls): Returns the isolation scope. """ - isolation_scope = _isolation_scope.get() + isolation_scope = cls._get_isolation_scope() if isolation_scope is None: isolation_scope = Scope(ty=ScopeType.ISOLATION) _isolation_scope.set(isolation_scope) return isolation_scope + @classmethod + def _get_isolation_scope(cls): + # type: () -> Optional[Scope] + """ + Returns the isolation scope without creating a new one. Internal use only. + """ + return _isolation_scope.get() + @classmethod def set_isolation_scope(cls, new_isolation_scope): # type: (Scope) -> None @@ -321,13 +338,11 @@ def _merge_scopes(self, additional_scope=None, additional_scope_kwargs=None): final_scope = copy(_global_scope) if _global_scope is not None else Scope() final_scope._type = ScopeType.MERGED - isolation_scope = _isolation_scope.get() - if isolation_scope is not None: - final_scope.update_from_scope(isolation_scope) + isolation_scope = self.get_isolation_scope() + final_scope.update_from_scope(isolation_scope) - current_scope = _current_scope.get() - if current_scope is not None: - final_scope.update_from_scope(current_scope) + current_scope = self.get_current_scope() + final_scope.update_from_scope(current_scope) if self != current_scope and self != isolation_scope: final_scope.update_from_scope(self) @@ -353,7 +368,7 @@ def get_client(cls): This checks the current scope, the isolation scope and the global scope for a client. 
If no client is available a :py:class:`sentry_sdk.client.NonRecordingClient` is returned. """ - current_scope = _current_scope.get() + current_scope = cls._get_current_scope() try: client = current_scope.client except AttributeError: @@ -362,7 +377,7 @@ def get_client(cls): if client is not None and client.is_active(): return client - isolation_scope = _isolation_scope.get() + isolation_scope = cls._get_isolation_scope() try: client = isolation_scope.client except AttributeError: @@ -661,23 +676,6 @@ def clear(self): # self._last_event_id is only applicable to isolation scopes self._last_event_id = None # type: Optional[str] - @_attr_setter - def level(self, value): - # type: (LogLevelStr) -> None - """ - When set this overrides the level. - - .. deprecated:: 1.0.0 - Use :func:`set_level` instead. - - :param value: The level to set. - """ - logger.warning( - "Deprecated: use .set_level() instead. This will be removed in the future." - ) - - self._level = value - def set_level(self, value): # type: (LogLevelStr) -> None """ @@ -766,11 +764,12 @@ def set_user(self, value): @property def span(self): # type: () -> Optional[Span] - """Get/set current tracing span or transaction.""" + """Get current tracing span.""" return self._span @span.setter def span(self, span): + """Set current tracing span.""" # type: (Optional[Span]) -> None self._span = span # XXX: this differs from the implementation in JS, there Scope.setSpan @@ -943,6 +942,10 @@ def start_transaction( ): # type: (Optional[Transaction], Optional[SamplingContext], Unpack[TransactionKwargs]) -> Union[Transaction, NoOpSpan] """ + .. deprecated:: 3.0.0 + This function is deprecated and will be removed in a future release. + Use :py:meth:`sentry_sdk.start_span` instead. + Start and return a transaction. Start an existing transaction if given, otherwise create and start a new @@ -973,19 +976,12 @@ def start_transaction( """ kwargs.setdefault("scope", self) - client = self.get_client() - try_autostart_continuous_profiler() custom_sampling_context = custom_sampling_context or {} - # kwargs at this point has type TransactionKwargs, since we have removed - # the client and custom_sampling_context from it. - transaction_kwargs = kwargs # type: TransactionKwargs - # if we haven't been given a transaction, make one - if transaction is None: - transaction = Transaction(**transaction_kwargs) + transaction = transaction or POTelSpan(**kwargs) # use traces_sample_rate, traces_sampler, and/or inheritance to make a # sampling decision @@ -1004,39 +1000,24 @@ def start_transaction( transaction._profile = profile - # we don't bother to keep spans if we already know we're not going to - # send the transaction - max_spans = (client.options["_experiments"].get("max_spans")) or 1000 - transaction.init_span_recorder(maxlen=max_spans) - return transaction - def start_span(self, **kwargs): - # type: (Any) -> Span + def start_span(self, span=None, custom_sampling_context=None, **kwargs): + # type: (Optional[Span], Optional[SamplingContext], Any) -> Span """ - Start a span whose parent is the currently active span or transaction, if any. + Start a span whose parent is the currently active span, if any. The return value is a :py:class:`sentry_sdk.tracing.Span` instance, typically used as a context manager to start and stop timing in a `with` block. - Only spans contained in a transaction are sent to Sentry. Most - integrations start a transaction at the appropriate time, for example - for every incoming HTTP request. 
Use - :py:meth:`sentry_sdk.start_transaction` to start a new transaction when - one is not already in progress. - For supported `**kwargs` see :py:class:`sentry_sdk.tracing.Span`. - - The instrumenter parameter is deprecated for user code, and it will - be removed in the next major version. Going forward, it should only - be used by the SDK itself. """ with new_scope(): kwargs.setdefault("scope", self) # get current span or transaction - span = self.span or self.get_isolation_scope().span + span = span or self.span or self.get_isolation_scope().span if span is None: # New spans get the `trace_id` from the scope @@ -1045,7 +1026,7 @@ def start_span(self, **kwargs): if propagation_context is not None: kwargs["trace_id"] = propagation_context.trace_id - span = Span(**kwargs) + span = POTelSpan(**kwargs) else: # Children take `trace_id`` from the parent span. span = span.start_child(**kwargs) @@ -1053,9 +1034,9 @@ def start_span(self, **kwargs): return span def continue_trace( - self, environ_or_headers, op=None, name=None, source=None, origin="manual" + self, environ_or_headers, op=None, name=None, source=None, origin=None ): - # type: (Dict[str, Any], Optional[str], Optional[str], Optional[str], str) -> Transaction + # type: (Dict[str, Any], Optional[str], Optional[str], Optional[str], Optional[str]) -> Transaction """ Sets the propagation context from environment or headers and returns a transaction. """ @@ -1349,8 +1330,8 @@ def run_event_processors(self, event, hint): if not is_check_in: # Get scopes without creating them to prevent infinite recursion - isolation_scope = _isolation_scope.get() - current_scope = _current_scope.get() + isolation_scope = self._get_isolation_scope() + current_scope = self._get_current_scope() event_processors = chain( global_event_processors, diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 88ea7f55ff..c5812c9864 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -2,6 +2,9 @@ import random from datetime import datetime, timedelta, timezone +from opentelemetry import trace as otel_trace, context +from opentelemetry.trace.status import StatusCode + import sentry_sdk from sentry_sdk.consts import SPANSTATUS, SPANDATA from sentry_sdk.profiler.continuous_profiler import get_profiler_id @@ -32,6 +35,7 @@ R = TypeVar("R") import sentry_sdk.profiler + from sentry_sdk.scope import Scope from sentry_sdk._types import ( Event, MeasurementUnit, @@ -145,6 +149,10 @@ class TransactionKwargs(SpanKwargs, total=False): "url": TRANSACTION_SOURCE_ROUTE, } +DEFAULT_SPAN_ORIGIN = "manual" + +tracer = otel_trace.get_tracer(__name__) + def get_span_status_from_http_code(http_status_code): # type: (int) -> str @@ -266,7 +274,7 @@ def __init__( containing_transaction=None, # type: Optional[Transaction] start_timestamp=None, # type: Optional[Union[datetime, float]] scope=None, # type: Optional[sentry_sdk.Scope] - origin="manual", # type: str + origin=None, # type: Optional[str] ): # type: (...) -> None self.trace_id = trace_id or uuid.uuid4().hex @@ -278,7 +286,7 @@ def __init__( self.description = description self.status = status self.scope = scope - self.origin = origin + self.origin = origin or DEFAULT_SPAN_ORIGIN self._measurements = {} # type: Dict[str, MeasurementValue] self._tags = {} # type: MutableMapping[str, str] self._data = {} # type: Dict[str, Any] @@ -1172,6 +1180,373 @@ def _set_initial_sampling_decision(self, sampling_context): pass +class POTelSpan: + """ + OTel span wrapper providing compatibility with the old span interface. 
+ """ + + # XXX Maybe it makes sense to repurpose the existing Span class for this. + # For now I'm keeping this class separate to have a clean slate. + + # XXX The wrapper itself should have as little state as possible + + def __init__( + self, + *, + active=True, # type: bool + op=None, # type: Optional[str] + description=None, # type: Optional[str] + status=None, # type: Optional[str] + scope=None, # type: Optional[Scope] + start_timestamp=None, # type: Optional[Union[datetime, float]] + origin=None, # type: Optional[str] + **_, # type: dict[str, object] + ): + # type: (...) -> None + """ + For backwards compatibility with old the old Span interface, this class + accepts arbitrary keyword arguments, in addition to the ones explicitly + listed in the signature. These additional arguments are ignored. + """ + from sentry_sdk.integrations.opentelemetry.utils import ( + convert_to_otel_timestamp, + ) + + if start_timestamp is not None: + # OTel timestamps have nanosecond precision + start_timestamp = convert_to_otel_timestamp(start_timestamp) + + # XXX deal with _otel_span being a NonRecordingSpan + self._otel_span = tracer.start_span( + description or op or "", start_time=start_timestamp + ) # XXX + self._active = active + + self.origin = origin or DEFAULT_SPAN_ORIGIN + self.op = op + self.description = description + if status is not None: + self.set_status(status) + + def __repr__(self): + # type: () -> str + return ( + "<%s(op=%r, description:%r, trace_id=%r, span_id=%r, parent_span_id=%r, sampled=%r, origin=%r)>" + % ( + self.__class__.__name__, + self.op, + self.description, + self.trace_id, + self.span_id, + self.parent_span_id, + self.sampled, + self.origin, + ) + ) + + def __enter__(self): + # type: () -> POTelSpan + # XXX use_span? https://github.com/open-telemetry/opentelemetry-python/blob/3836da8543ce9751051e38a110c0468724042e62/opentelemetry-api/src/opentelemetry/trace/__init__.py#L547 + # + # create a Context object with parent set as current span + if self._active: + ctx = otel_trace.set_span_in_context(self._otel_span) + # set as the implicit current context + self._ctx_token = context.attach(ctx) + + return self + + def __exit__(self, ty, value, tb): + # type: (Optional[Any], Optional[Any], Optional[Any]) -> None + self._otel_span.end() + # XXX set status to error if unset and an exception occurred? + if self._active: + context.detach(self._ctx_token) + + @property + def description(self): + # type: () -> Optional[str] + from sentry_sdk.integrations.opentelemetry.consts import SentrySpanAttribute + + return self._otel_span.attributes.get(SentrySpanAttribute.DESCRIPTION) + + @description.setter + def description(self, value): + # type: (Optional[str]) -> None + from sentry_sdk.integrations.opentelemetry.consts import SentrySpanAttribute + + if value is not None: + self._otel_span.set_attribute(SentrySpanAttribute.DESCRIPTION, value) + + @property + def origin(self): + # type: () -> Optional[str] + from sentry_sdk.integrations.opentelemetry.consts import SentrySpanAttribute + + return self._otel_span.attributes.get(SentrySpanAttribute.ORIGIN) + + @origin.setter + def origin(self, value): + # type: (Optional[str]) -> None + from sentry_sdk.integrations.opentelemetry.consts import SentrySpanAttribute + + if value is not None: + self._otel_span.set_attribute(SentrySpanAttribute.ORIGIN, value) + + @property + def containing_transaction(self): + # type: () -> Optional[Transaction] + """ + Get the transaction this span is a child of. + + .. 
deprecated:: 3.0.0 + This will be removed in the future. Use :func:`root_span` instead. + """ + logger.warning("Deprecated: This will be removed in the future.") + return self.root_span + + @containing_transaction.setter + def containing_transaction(self, value): + # type: (Span) -> None + """ + Set this span's transaction. + .. deprecated:: 3.0.0 + Use :func:`root_span` instead. + """ + pass + + @property + def root_span(self): + if isinstance(self._otel_span, otel_trace.NonRecordingSpan): + return None + + parent = None + while True: + # XXX test if this actually works + if self._otel_span.parent: + parent = self._otel_span.parent + else: + break + + return parent + + @root_span.setter + def root_span(self, value): + pass + + @property + def is_root_span(self): + if isinstance(self._otel_span, otel_trace.NonRecordingSpan): + return False + + return self._otel_span.parent is None + + @property + def parent_span_id(self): + # type: () -> Optional[str] + return self._otel_span.parent if hasattr(self._otel_span, "parent") else None + + @property + def trace_id(self): + # type: () -> Optional[str] + return self._otel_span.get_span_context().trace_id + + @property + def span_id(self): + # type: () -> Optional[str] + return self._otel_span.get_span_context().span_id + + @property + def sampled(self): + # type: () -> Optional[bool] + return self._otel_span.get_span_context().trace_flags.sampled + + @sampled.setter + def sampled(self, value): + # type: () -> Optional[bool] + pass + + @property + def op(self): + # type: () -> Optional[str] + from sentry_sdk.integrations.opentelemetry.consts import SentrySpanAttribute + + self._otel_span.attributes.get(SentrySpanAttribute.OP) + + @op.setter + def op(self, value): + # type: (Optional[str]) -> None + from sentry_sdk.integrations.opentelemetry.consts import SentrySpanAttribute + + if value is not None: + self._otel_span.set_attribute(SentrySpanAttribute.OP, value) + + @property + def name(self): + # type: () -> str + pass + + @name.setter + def name(self, value): + # type: (str) -> None + pass + + @property + def source(self): + # type: () -> str + pass + + @source.setter + def source(self, value): + # type: (str) -> None + pass + + def start_child(self, **kwargs): + # type: (str, **Any) -> POTelSpan + kwargs.setdefault("sampled", self.sampled) + + span = POTelSpan(**kwargs) + return span + + @classmethod + def continue_from_environ( + cls, + environ, # type: Mapping[str, str] + **kwargs, # type: Any + ): + # type: (...) -> POTelSpan + # XXX actually propagate + span = POTelSpan(**kwargs) + return span + + @classmethod + def continue_from_headers( + cls, + headers, # type: Mapping[str, str] + **kwargs, # type: Any + ): + # type: (...) -> POTelSpan + # XXX actually propagate + span = POTelSpan(**kwargs) + return span + + def iter_headers(self): + # type: () -> Iterator[Tuple[str, str]] + pass + + @classmethod + def from_traceparent( + cls, + traceparent, # type: Optional[str] + **kwargs, # type: Any + ): + # type: (...) 
-> Optional[Transaction] + # XXX actually propagate + span = POTelSpan(**kwargs) + return span + + def to_traceparent(self): + # type: () -> str + if self.sampled is True: + sampled = "1" + elif self.sampled is False: + sampled = "0" + else: + sampled = None + + traceparent = "%s-%s" % (self.trace_id, self.span_id) + if sampled is not None: + traceparent += "-%s" % (sampled,) + + return traceparent + + def to_baggage(self): + # type: () -> Optional[Baggage] + pass + + def set_tag(self, key, value): + # type: (str, Any) -> None + from sentry_sdk.integrations.opentelemetry.consts import SentrySpanAttribute + + self.set_attribute(f"{SentrySpanAttribute.TAG}.{key}", value) + + def set_data(self, key, value): + # type: (str, Any) -> None + self.set_attribute(key, value) + + def set_attribute(self, key, value): + # type: (str, Any) -> None + self._otel_span.set_attribute(key, value) + + def set_status(self, status): + # type: (str) -> None + if status == SPANSTATUS.OK: + otel_status = StatusCode.OK + otel_description = None + else: + otel_status = StatusCode.ERROR + otel_description = status.value + + self._otel_span.set_status(otel_status, otel_description) + + def set_measurement(self, name, value, unit=""): + # type: (str, float, MeasurementUnit) -> None + from sentry_sdk.integrations.opentelemetry.consts import SentrySpanAttribute + + # Stringify value here since OTel expects all seq items to be of one type + self.set_attribute( + f"{SentrySpanAttribute.MEASUREMENT}.{name}", (str(value), unit) + ) + + def set_thread(self, thread_id, thread_name): + # type: (Optional[int], Optional[str]) -> None + if thread_id is not None: + self.set_data(SPANDATA.THREAD_ID, str(thread_id)) + + if thread_name is not None: + self.set_data(SPANDATA.THREAD_NAME, thread_name) + + def set_profiler_id(self, profiler_id): + # type: (Optional[str]) -> None + if profiler_id is not None: + self.set_data(SPANDATA.PROFILER_ID, profiler_id) + + def set_http_status(self, http_status): + # type: (int) -> None + self.set_data(SPANDATA.HTTP_STATUS_CODE, http_status) + self.set_status(get_span_status_from_http_code(http_status)) + + def is_success(self): + # type: () -> bool + return self._otel_span.status.code == StatusCode.OK + + def finish(self, scope=None, end_timestamp=None): + # type: (Optional[sentry_sdk.Scope], Optional[Union[float, datetime]]) -> Optional[str] + pass + + def to_json(self): + # type: () -> dict[str, Any] + pass + + def get_trace_context(self): + # type: () -> Any + pass + + def get_profile_context(self): + # type: () -> Optional[ProfileContext] + pass + + # transaction/root span methods + + def set_context(self, key, value): + # type: (str, Any) -> None + pass + + def get_baggage(self): + # type: () -> Baggage + pass + + if TYPE_CHECKING: @overload diff --git a/sentry_sdk/tracing_utils.py b/sentry_sdk/tracing_utils.py index 0dabfbc486..aa34398884 100644 --- a/sentry_sdk/tracing_utils.py +++ b/sentry_sdk/tracing_utils.py @@ -112,7 +112,7 @@ def record_sql_queries( paramstyle, # type: Optional[str] executemany, # type: bool record_cursor_repr=False, # type: bool - span_origin="manual", # type: str + span_origin=None, # type: Optional[str] ): # type: (...) 
-> Generator[sentry_sdk.tracing.Span, None, None] @@ -687,7 +687,7 @@ def func_with_tracing(*args, **kwargs): def get_current_span(scope=None): - # type: (Optional[sentry_sdk.Scope]) -> Optional[Span] + # type: (Optional[sentry_sdk.Scope]) -> Optional[sentry_sdk.tracing.Span] """ Returns the currently active span if there is one running, otherwise `None` """ @@ -702,6 +702,3 @@ def get_current_span(scope=None): LOW_QUALITY_TRANSACTION_SOURCES, SENTRY_TRACE_HEADER_NAME, ) - -if TYPE_CHECKING: - from sentry_sdk.tracing import Span diff --git a/setup.py b/setup.py index a949553dfd..246569c1d2 100644 --- a/setup.py +++ b/setup.py @@ -41,6 +41,7 @@ def get_file_text(file_name): install_requires=[ "urllib3>=1.26.11", "certifi", + "opentelemetry-distro>=0.35b0", # XXX check lower bound ], extras_require={ "aiohttp": ["aiohttp>=3.5"], diff --git a/tests/conftest.py b/tests/conftest.py index c31a394fb5..46f08a0232 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -63,6 +63,7 @@ def benchmark(): from sentry_sdk import scope +import sentry_sdk.integrations.opentelemetry.scope as potel_scope @pytest.fixture(autouse=True) @@ -74,6 +75,9 @@ def clean_scopes(): scope._isolation_scope.set(None) scope._current_scope.set(None) + potel_scope._INITIAL_CURRENT_SCOPE.clear() + potel_scope._INITIAL_ISOLATION_SCOPE.clear() + @pytest.fixture(autouse=True) def internal_exceptions(request): diff --git a/tests/integrations/opentelemetry/test_potel.py b/tests/integrations/opentelemetry/test_potel.py new file mode 100644 index 0000000000..5e44cc3888 --- /dev/null +++ b/tests/integrations/opentelemetry/test_potel.py @@ -0,0 +1,316 @@ +import pytest + +from opentelemetry import trace + +import sentry_sdk + + +tracer = trace.get_tracer(__name__) + + +@pytest.fixture(autouse=True) +def sentry_init_potel(sentry_init): + sentry_init( + traces_sample_rate=1.0, + _experiments={"otel_powered_performance": True}, + ) + + +def test_root_span_transaction_payload_started_with_otel_only(capture_envelopes): + envelopes = capture_envelopes() + + with tracer.start_as_current_span("request"): + pass + + (envelope,) = envelopes + # TODO-neel-potel DSC header + (item,) = envelope.items + payload = item.payload.json + + assert payload["type"] == "transaction" + assert payload["transaction"] == "request" + assert payload["transaction_info"] == {"source": "custom"} + assert payload["timestamp"] is not None + assert payload["start_timestamp"] is not None + + contexts = payload["contexts"] + assert "runtime" in contexts + assert "otel" in contexts + assert "resource" in contexts["otel"] + + trace_context = contexts["trace"] + assert "trace_id" in trace_context + assert "span_id" in trace_context + assert trace_context["origin"] == "manual" + assert trace_context["op"] == "request" + assert trace_context["status"] == "ok" + + assert payload["spans"] == [] + + +def test_child_span_payload_started_with_otel_only(capture_envelopes): + envelopes = capture_envelopes() + + with tracer.start_as_current_span("request"): + with tracer.start_as_current_span("db"): + pass + + (envelope,) = envelopes + (item,) = envelope.items + payload = item.payload.json + (span,) = payload["spans"] + + assert span["op"] == "db" + assert span["description"] == "db" + assert span["origin"] == "manual" + assert span["status"] == "ok" + assert span["span_id"] is not None + assert span["trace_id"] == payload["contexts"]["trace"]["trace_id"] + assert span["parent_span_id"] == payload["contexts"]["trace"]["span_id"] + assert span["timestamp"] is not None + assert 
span["start_timestamp"] is not None + + +def test_children_span_nesting_started_with_otel_only(capture_envelopes): + envelopes = capture_envelopes() + + with tracer.start_as_current_span("request"): + with tracer.start_as_current_span("db"): + with tracer.start_as_current_span("redis"): + pass + with tracer.start_as_current_span("http"): + pass + + (envelope,) = envelopes + (item,) = envelope.items + payload = item.payload.json + (db_span, http_span, redis_span) = payload["spans"] + + assert db_span["op"] == "db" + assert redis_span["op"] == "redis" + assert http_span["op"] == "http" + + assert db_span["trace_id"] == payload["contexts"]["trace"]["trace_id"] + assert redis_span["trace_id"] == payload["contexts"]["trace"]["trace_id"] + assert http_span["trace_id"] == payload["contexts"]["trace"]["trace_id"] + + assert db_span["parent_span_id"] == payload["contexts"]["trace"]["span_id"] + assert http_span["parent_span_id"] == payload["contexts"]["trace"]["span_id"] + assert redis_span["parent_span_id"] == db_span["span_id"] + + +def test_root_span_transaction_payload_started_with_sentry_only(capture_envelopes): + envelopes = capture_envelopes() + + with sentry_sdk.start_span(description="request"): + pass + + (envelope,) = envelopes + # TODO-neel-potel DSC header + (item,) = envelope.items + payload = item.payload.json + + assert payload["type"] == "transaction" + assert payload["transaction"] == "request" + assert payload["transaction_info"] == {"source": "custom"} + assert payload["timestamp"] is not None + assert payload["start_timestamp"] is not None + + contexts = payload["contexts"] + assert "runtime" in contexts + assert "otel" in contexts + assert "resource" in contexts["otel"] + + trace_context = contexts["trace"] + assert "trace_id" in trace_context + assert "span_id" in trace_context + assert trace_context["origin"] == "manual" + assert trace_context["op"] == "request" + assert trace_context["status"] == "ok" + + assert payload["spans"] == [] + + +def test_child_span_payload_started_with_sentry_only(capture_envelopes): + envelopes = capture_envelopes() + + with sentry_sdk.start_span(description="request"): + with sentry_sdk.start_span(description="db"): + pass + + (envelope,) = envelopes + (item,) = envelope.items + payload = item.payload.json + (span,) = payload["spans"] + + assert span["op"] == "db" + assert span["description"] == "db" + assert span["origin"] == "manual" + assert span["status"] == "ok" + assert span["span_id"] is not None + assert span["trace_id"] == payload["contexts"]["trace"]["trace_id"] + assert span["parent_span_id"] == payload["contexts"]["trace"]["span_id"] + assert span["timestamp"] is not None + assert span["start_timestamp"] is not None + + +def test_children_span_nesting_started_with_sentry_only(capture_envelopes): + envelopes = capture_envelopes() + + with sentry_sdk.start_span(description="request"): + with sentry_sdk.start_span(description="db"): + with sentry_sdk.start_span(description="redis"): + pass + with sentry_sdk.start_span(description="http"): + pass + + (envelope,) = envelopes + (item,) = envelope.items + payload = item.payload.json + (db_span, http_span, redis_span) = payload["spans"] + + assert db_span["op"] == "db" + assert redis_span["op"] == "redis" + assert http_span["op"] == "http" + + assert db_span["trace_id"] == payload["contexts"]["trace"]["trace_id"] + assert redis_span["trace_id"] == payload["contexts"]["trace"]["trace_id"] + assert http_span["trace_id"] == payload["contexts"]["trace"]["trace_id"] + + assert 
db_span["parent_span_id"] == payload["contexts"]["trace"]["span_id"] + assert http_span["parent_span_id"] == payload["contexts"]["trace"]["span_id"] + assert redis_span["parent_span_id"] == db_span["span_id"] + + +def test_children_span_nesting_mixed(capture_envelopes): + envelopes = capture_envelopes() + + with sentry_sdk.start_span(description="request"): + with tracer.start_as_current_span("db"): + with sentry_sdk.start_span(description="redis"): + pass + with tracer.start_as_current_span("http"): + pass + + (envelope,) = envelopes + (item,) = envelope.items + payload = item.payload.json + (db_span, http_span, redis_span) = payload["spans"] + + assert db_span["op"] == "db" + assert redis_span["op"] == "redis" + assert http_span["op"] == "http" + + assert db_span["trace_id"] == payload["contexts"]["trace"]["trace_id"] + assert redis_span["trace_id"] == payload["contexts"]["trace"]["trace_id"] + assert http_span["trace_id"] == payload["contexts"]["trace"]["trace_id"] + + assert db_span["parent_span_id"] == payload["contexts"]["trace"]["span_id"] + assert http_span["parent_span_id"] == payload["contexts"]["trace"]["span_id"] + assert redis_span["parent_span_id"] == db_span["span_id"] + + +def test_span_attributes_in_data_started_with_otel(capture_envelopes): + envelopes = capture_envelopes() + + with tracer.start_as_current_span("request") as request_span: + request_span.set_attributes({"foo": "bar", "baz": 42}) + with tracer.start_as_current_span("db") as db_span: + db_span.set_attributes({"abc": 99, "def": "moo"}) + + (envelope,) = envelopes + (item,) = envelope.items + payload = item.payload.json + + assert payload["contexts"]["trace"]["data"] == {"foo": "bar", "baz": 42} + assert payload["spans"][0]["data"] == {"abc": 99, "def": "moo"} + + +def test_span_data_started_with_sentry(capture_envelopes): + envelopes = capture_envelopes() + + with sentry_sdk.start_span(op="http", description="request") as request_span: + request_span.set_data("foo", "bar") + with sentry_sdk.start_span(op="db", description="statement") as db_span: + db_span.set_data("baz", 42) + + (envelope,) = envelopes + (item,) = envelope.items + payload = item.payload.json + + assert payload["contexts"]["trace"]["data"] == { + "foo": "bar", + "sentry.origin": "manual", + "sentry.description": "request", + "sentry.op": "http", + } + assert payload["spans"][0]["data"] == { + "baz": 42, + "sentry.origin": "manual", + "sentry.description": "statement", + "sentry.op": "db", + } + + +def test_transaction_tags_started_with_otel(capture_envelopes): + envelopes = capture_envelopes() + + sentry_sdk.set_tag("tag.global", 99) + with tracer.start_as_current_span("request"): + sentry_sdk.set_tag("tag.inner", "foo") + + (envelope,) = envelopes + (item,) = envelope.items + payload = item.payload.json + + assert payload["tags"] == {"tag.global": 99, "tag.inner": "foo"} + + +def test_transaction_tags_started_with_sentry(capture_envelopes): + envelopes = capture_envelopes() + + sentry_sdk.set_tag("tag.global", 99) + with sentry_sdk.start_span(description="request"): + sentry_sdk.set_tag("tag.inner", "foo") + + (envelope,) = envelopes + (item,) = envelope.items + payload = item.payload.json + + assert payload["tags"] == {"tag.global": 99, "tag.inner": "foo"} + + +def test_multiple_transaction_tags_isolation_scope_started_with_otel(capture_envelopes): + envelopes = capture_envelopes() + + sentry_sdk.set_tag("tag.global", 99) + with sentry_sdk.isolation_scope(): + with tracer.start_as_current_span("request a"): + 
sentry_sdk.set_tag("tag.inner.a", "a") + with sentry_sdk.isolation_scope(): + with tracer.start_as_current_span("request b"): + sentry_sdk.set_tag("tag.inner.b", "b") + + (payload_a, payload_b) = [envelope.items[0].payload.json for envelope in envelopes] + + assert payload_a["tags"] == {"tag.global": 99, "tag.inner.a": "a"} + assert payload_b["tags"] == {"tag.global": 99, "tag.inner.b": "b"} + + +def test_multiple_transaction_tags_isolation_scope_started_with_sentry( + capture_envelopes, +): + envelopes = capture_envelopes() + + sentry_sdk.set_tag("tag.global", 99) + with sentry_sdk.isolation_scope(): + with sentry_sdk.start_span(description="request a"): + sentry_sdk.set_tag("tag.inner.a", "a") + with sentry_sdk.isolation_scope(): + with sentry_sdk.start_span(description="request b"): + sentry_sdk.set_tag("tag.inner.b", "b") + + (payload_a, payload_b) = [envelope.items[0].payload.json for envelope in envelopes] + + assert payload_a["tags"] == {"tag.global": 99, "tag.inner.a": "a"} + assert payload_b["tags"] == {"tag.global": 99, "tag.inner.b": "b"} diff --git a/tests/integrations/opentelemetry/test_utils.py b/tests/integrations/opentelemetry/test_utils.py index ceb58a58ef..66ffd7898a 100644 --- a/tests/integrations/opentelemetry/test_utils.py +++ b/tests/integrations/opentelemetry/test_utils.py @@ -23,6 +23,7 @@ "description": "OTel Span Blank", "status": "ok", "http_status_code": None, + "origin": None, }, ), ( @@ -36,6 +37,7 @@ "description": "OTel Span RPC", "status": "ok", "http_status_code": None, + "origin": None, }, ), ( @@ -49,6 +51,7 @@ "description": "OTel Span Messaging", "status": "ok", "http_status_code": None, + "origin": None, }, ), ( @@ -62,6 +65,7 @@ "description": "OTel Span FaaS", "status": "ok", "http_status_code": None, + "origin": None, }, ), ], @@ -72,12 +76,13 @@ def test_extract_span_data(name, status, attributes, expected): otel_span.status = Status(StatusCode.UNSET) otel_span.attributes = attributes - op, description, status, http_status_code = extract_span_data(otel_span) + op, description, status, http_status_code, origin = extract_span_data(otel_span) result = { "op": op, "description": description, "status": status, "http_status_code": http_status_code, + "origin": origin, } assert result == expected @@ -99,6 +104,7 @@ def test_extract_span_data(name, status, attributes, expected): "description": "GET", "status": "ok", "http_status_code": None, + "origin": None, }, ), ( @@ -113,6 +119,7 @@ def test_extract_span_data(name, status, attributes, expected): "description": "GET /target", "status": "ok", "http_status_code": None, + "origin": None, }, ), ( @@ -127,6 +134,7 @@ def test_extract_span_data(name, status, attributes, expected): "description": "GET example.com", "status": "ok", "http_status_code": None, + "origin": None, }, ), ( @@ -142,6 +150,7 @@ def test_extract_span_data(name, status, attributes, expected): "description": "GET /target", "status": "ok", "http_status_code": None, + "origin": None, }, ), ( @@ -156,6 +165,7 @@ def test_extract_span_data(name, status, attributes, expected): "description": "GET https://example.com/bla/", "status": "ok", "http_status_code": None, + "origin": None, }, ), ], @@ -166,12 +176,15 @@ def test_span_data_for_http_method(kind, status, attributes, expected): otel_span.status = status otel_span.attributes = attributes - op, description, status, http_status_code = span_data_for_http_method(otel_span) + op, description, status, http_status_code, origin = span_data_for_http_method( + otel_span + ) result = { "op": op, 
"description": description, "status": status, "http_status_code": http_status_code, + "origin": origin, } assert result == expected @@ -181,19 +194,21 @@ def test_span_data_for_db_query(): otel_span.name = "OTel Span" otel_span.attributes = {} - op, description, status, http_status = span_data_for_db_query(otel_span) + op, description, status, http_status, origin = span_data_for_db_query(otel_span) assert op == "db" assert description == "OTel Span" assert status is None assert http_status is None + assert origin is None otel_span.attributes = {"db.statement": "SELECT * FROM table;"} - op, description, status, http_status = span_data_for_db_query(otel_span) + op, description, status, http_status, origin = span_data_for_db_query(otel_span) assert op == "db" assert description == "SELECT * FROM table;" assert status is None assert http_status is None + assert origin is None @pytest.mark.parametrize( diff --git a/tests/test_api.py b/tests/test_api.py index ffe1be756d..46fc24fd24 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -30,7 +30,7 @@ def test_get_current_span(): @pytest.mark.forked -def test_get_current_span_default_hub(sentry_init): +def test_get_current_span_current_scope(sentry_init): sentry_init() assert get_current_span() is None @@ -43,7 +43,7 @@ def test_get_current_span_default_hub(sentry_init): @pytest.mark.forked -def test_get_current_span_default_hub_with_transaction(sentry_init): +def test_get_current_span_current_scope_with_transaction(sentry_init): sentry_init() assert get_current_span() is None diff --git a/tests/tracing/test_misc.py b/tests/tracing/test_misc.py index 02966642fd..996d9c4d5d 100644 --- a/tests/tracing/test_misc.py +++ b/tests/tracing/test_misc.py @@ -258,7 +258,7 @@ def test_circular_references(monkeypatch, sentry_init, request): assert gc.collect() == 0 -def test_set_meaurement(sentry_init, capture_events): +def test_set_measurement(sentry_init, capture_events): sentry_init(traces_sample_rate=1.0) events = capture_events() @@ -286,7 +286,7 @@ def test_set_meaurement(sentry_init, capture_events): assert event["measurements"]["metric.foobar"] == {"value": 17.99, "unit": "percent"} -def test_set_meaurement_public_api(sentry_init, capture_events): +def test_set_measurement_public_api(sentry_init, capture_events): sentry_init(traces_sample_rate=1.0) events = capture_events() @@ -412,7 +412,7 @@ def test_transaction_dropped_debug_not_started(sentry_init, sampled): ) -def test_transaction_dropeed_sampled_false(sentry_init): +def test_transaction_dropped_sampled_false(sentry_init): sentry_init(enable_tracing=True) tx = Transaction(sampled=False) From 5e35db82935f76cdeb6f4ecbec5397b32008658c Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Mon, 26 Aug 2024 16:50:29 +0200 Subject: [PATCH 015/244] Implement new continue_trace and make WSGI work (#3460) * Implement new continue_trace and make WSGI work The new `continue_trace` API will no longer return a `Transaction` entity. Instead, it will simply update the propagation context and run as a contextmanager. (TODO) It will set a remote span on the OTEL context so that it can be picked up by `start_span` later. 
--- sentry_sdk/api.py | 14 ++++--- .../integrations/opentelemetry/scope.py | 10 ++++- sentry_sdk/integrations/wsgi.py | 39 +++++++++---------- sentry_sdk/tracing.py | 30 +++++++++++++- 4 files changed, 64 insertions(+), 29 deletions(-) diff --git a/sentry_sdk/api.py b/sentry_sdk/api.py index 0b88ea3274..36cefb9a57 100644 --- a/sentry_sdk/api.py +++ b/sentry_sdk/api.py @@ -1,4 +1,5 @@ import inspect +from contextlib import contextmanager from sentry_sdk import tracing_utils, Client from sentry_sdk._init_implementation import init @@ -23,6 +24,7 @@ from typing import Callable from typing import TypeVar from typing import Union + from typing import Generator from typing_extensions import Unpack @@ -336,11 +338,11 @@ def get_baggage(): return None -def continue_trace(environ_or_headers, op=None, name=None, source=None, origin=None): - # type: (Dict[str, Any], Optional[str], Optional[str], Optional[str], Optional[str]) -> Transaction +@contextmanager +def continue_trace(environ_or_headers): + # type: (Dict[str, Any]) -> Generator[None, None, None] """ - Sets the propagation context from environment or headers and returns a transaction. + Sets the propagation context from environment or headers to continue an incoming trace. """ - return get_isolation_scope().continue_trace( - environ_or_headers, op, name, source, origin - ) + with get_isolation_scope().continue_trace(environ_or_headers): + yield diff --git a/sentry_sdk/integrations/opentelemetry/scope.py b/sentry_sdk/integrations/opentelemetry/scope.py index 6d6f8f6acf..c1eacc3852 100644 --- a/sentry_sdk/integrations/opentelemetry/scope.py +++ b/sentry_sdk/integrations/opentelemetry/scope.py @@ -12,7 +12,7 @@ from sentry_sdk._types import TYPE_CHECKING if TYPE_CHECKING: - from typing import Tuple, Optional, Generator + from typing import Tuple, Optional, Generator, Dict, Any class PotelScope(Scope): @@ -58,6 +58,14 @@ def _get_isolation_scope(cls): scopes = cls._get_scopes() return scopes[1] if scopes else None + @contextmanager + def continue_trace(self, environ_or_headers): + # type: (Dict[str, Any]) -> Generator[None, None, None] + with new_scope() as scope: + scope.generate_propagation_context(environ_or_headers) + # TODO-neel-potel add remote span on context + yield + _INITIAL_CURRENT_SCOPE = PotelScope(ty=ScopeType.CURRENT) _INITIAL_ISOLATION_SCOPE = PotelScope(ty=ScopeType.ISOLATION) diff --git a/sentry_sdk/integrations/wsgi.py b/sentry_sdk/integrations/wsgi.py index 9ea83a629c..cd3b53e805 100644 --- a/sentry_sdk/integrations/wsgi.py +++ b/sentry_sdk/integrations/wsgi.py @@ -91,26 +91,25 @@ def __call__(self, environ, start_response): ) ) - transaction = continue_trace( - environ, - op=OP.HTTP_SERVER, - name="generic WSGI request", - source=TRANSACTION_SOURCE_ROUTE, - origin=self.span_origin, - ) - - with sentry_sdk.start_transaction( - transaction, custom_sampling_context={"wsgi_environ": environ} - ): - try: - response = self.app( - environ, - partial( - _sentry_start_response, start_response, transaction - ), - ) - except BaseException: - reraise(*_capture_exception()) + with continue_trace(environ): + with sentry_sdk.start_transaction( + op=OP.HTTP_SERVER, + name="generic WSGI request", + source=TRANSACTION_SOURCE_ROUTE, + origin=self.span_origin, + custom_sampling_context={"wsgi_environ": environ}, + ) as transaction: + try: + response = self.app( + environ, + partial( + _sentry_start_response, + start_response, + transaction, + ), + ) + except BaseException: + reraise(*_capture_exception()) finally: 
_wsgi_middleware_applied.set(False) diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index c5812c9864..c04b51a344 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -89,7 +89,7 @@ class SpanKwargs(TypedDict, total=False): scope: "sentry_sdk.Scope" """The scope to use for this span. If not provided, we use the current scope.""" - origin: str + origin: Optional[str] """ The origin of the span. See https://develop.sentry.dev/sdk/performance/trace-origin/ @@ -1401,6 +1401,32 @@ def source(self, value): # type: (str) -> None pass + @property + def start_timestamp(self): + # type: () -> Optional[datetime] + start_time = self._otel_span.start_time + if start_time is None: + return None + + from sentry_sdk.integrations.opentelemetry.utils import ( + convert_from_otel_timestamp, + ) + + return convert_from_otel_timestamp(start_time) + + @property + def timestamp(self): + # type: () -> Optional[datetime] + end_time = self._otel_span.end_time + if end_time is None: + return None + + from sentry_sdk.integrations.opentelemetry.utils import ( + convert_from_otel_timestamp, + ) + + return convert_from_otel_timestamp(end_time) + def start_child(self, **kwargs): # type: (str, **Any) -> POTelSpan kwargs.setdefault("sampled", self.sampled) @@ -1485,7 +1511,7 @@ def set_status(self, status): otel_description = None else: otel_status = StatusCode.ERROR - otel_description = status.value + otel_description = status self._otel_span.set_status(otel_status, otel_description) From c6b96ca80e7935da8e49609f12f7ad7ad81830e4 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Mon, 26 Aug 2024 16:55:37 +0200 Subject: [PATCH 016/244] Update migration guide for continue_trace --- MIGRATION_GUIDE.md | 1 + 1 file changed, 1 insertion(+) diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index 7a71c3e872..20195b0658 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -15,6 +15,7 @@ Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of wh - `Span.finish()` does not accept a `hub` parameter anymore. - The `Profile()` constructor does not accept a `hub` parameter anymore. - A `Profile` object does not have a `.hub` property anymore. +- `sentry_sdk.continue_trace` no longer returns a `Transaction` and is now a context manager. ### Removed From 7b8829026ae395caba7ebe4e764e4f5bf87e2865 Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Wed, 28 Aug 2024 11:08:55 +0200 Subject: [PATCH 017/244] update mig guide --- MIGRATION_GUIDE.md | 1 + 1 file changed, 1 insertion(+) diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index 20195b0658..40d1a61339 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -19,6 +19,7 @@ Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of wh ### Removed +- Dropped support for Python 3.6. - When setting span status, the HTTP status code is no longer automatically added as a tag. - Class `Hub` has been removed. - Class `_ScopeManager` has been removed. From 3f9e2d0a421a3991d5fe011830467813f6573dfd Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Wed, 28 Aug 2024 11:12:01 +0200 Subject: [PATCH 018/244] fix english --- MIGRATION_GUIDE.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index 40d1a61339..deab7ae038 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -3,7 +3,7 @@ ## Upgrading to 3.0 -Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of what's changed. Looking for a more digestable summary? 
See the [guide in the docs](https://docs.sentry.io/platforms/python/migration/2.x-to-3.x) with the most common migration patterns. +Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of what's changed. Looking for a more digestible summary? See the [guide in the docs](https://docs.sentry.io/platforms/python/migration/2.x-to-3.x) with the most common migration patterns. ### New Features @@ -34,7 +34,7 @@ Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of wh ## Upgrading to 2.0 -Looking to upgrade from Sentry SDK 1.x to 2.x? Here's a comprehensive list of what's changed. Looking for a more digestable summary? See the [guide in the docs](https://docs.sentry.io/platforms/python/migration/1.x-to-2.x) with the most common migration patterns. +Looking to upgrade from Sentry SDK 1.x to 2.x? Here's a comprehensive list of what's changed. Looking for a more digestible summary? See the [guide in the docs](https://docs.sentry.io/platforms/python/migration/1.x-to-2.x) with the most common migration patterns. ### New Features From 64cbaff394b8166fcdf28064437d3a22c20b6500 Mon Sep 17 00:00:00 2001 From: Daniel Szoke <7881302+szokeasaurusrex@users.noreply.github.com> Date: Wed, 28 Aug 2024 11:12:17 +0200 Subject: [PATCH 019/244] Fix "indented block" linter error (#3468) * fix: Fix mypy "indented block" error Fix this [mypy error](https://github.com/getsentry/sentry-python/actions/runs/10390581522/job/28771379618): ``` sentry_sdk/integrations/opentelemetry/potel_span_processor.py:149: error: expected an indented block after function definition on line 31 [syntax] ``` Honestly not sure why this change fixes the problem, maybe there is some bug in `mypy`. * fix: Fix other mypy syntax failures After fixing the previous mypy failure, mypy discovered more syntax problems, which this commit fixes. * fix: Correct typing in `potel_span_processor` Fixing the original mypy error broke the typing; this change fixes the typing. 
--- .../opentelemetry/potel_span_processor.py | 12 ++++++++---- sentry_sdk/integrations/opentelemetry/scope.py | 2 +- sentry_sdk/scope.py | 2 +- sentry_sdk/tracing.py | 2 +- 4 files changed, 11 insertions(+), 7 deletions(-) diff --git a/sentry_sdk/integrations/opentelemetry/potel_span_processor.py b/sentry_sdk/integrations/opentelemetry/potel_span_processor.py index 8b2a2f4c36..db4c1f58d6 100644 --- a/sentry_sdk/integrations/opentelemetry/potel_span_processor.py +++ b/sentry_sdk/integrations/opentelemetry/potel_span_processor.py @@ -1,4 +1,5 @@ from collections import deque, defaultdict +from typing import cast from opentelemetry.trace import format_trace_id, format_span_id from opentelemetry.context import Context @@ -145,7 +146,7 @@ def _root_span_to_transaction_event(self, span): "transaction_info": {"source": "custom"}, "contexts": contexts, } - ) # type: Event + ) return event @@ -154,7 +155,10 @@ def _span_to_json(self, span): if not span.context: return None - span_json = self._common_span_transaction_attributes_as_json(span) + # This is a safe cast because dict[str, Any] is a superset of Event + span_json = cast( + "dict[str, Any]", self._common_span_transaction_attributes_as_json(span) + ) if span_json is None: return None @@ -184,14 +188,14 @@ def _span_to_json(self, span): return span_json def _common_span_transaction_attributes_as_json(self, span): - # type: (ReadableSpan) -> Optional[dict[str, Any]] + # type: (ReadableSpan) -> Optional[Event] if not span.start_time or not span.end_time: return None common_json = { "start_timestamp": convert_from_otel_timestamp(span.start_time), "timestamp": convert_from_otel_timestamp(span.end_time), - } # type: dict[str, Any] + } # type: Event measurements = extract_span_attributes(span, SentrySpanAttribute.MEASUREMENT) if measurements: diff --git a/sentry_sdk/integrations/opentelemetry/scope.py b/sentry_sdk/integrations/opentelemetry/scope.py index c1eacc3852..50a7e45b01 100644 --- a/sentry_sdk/integrations/opentelemetry/scope.py +++ b/sentry_sdk/integrations/opentelemetry/scope.py @@ -43,10 +43,10 @@ def _get_current_scope(cls): @classmethod def get_isolation_scope(cls): + # type: () -> Scope """ Returns the isolation scope. """ - # type: () -> Scope return cls._get_isolation_scope() or _INITIAL_ISOLATION_SCOPE @classmethod diff --git a/sentry_sdk/scope.py b/sentry_sdk/scope.py index c5502d4649..20f8e95325 100644 --- a/sentry_sdk/scope.py +++ b/sentry_sdk/scope.py @@ -770,8 +770,8 @@ def span(self): @span.setter def span(self, span): - """Set current tracing span.""" # type: (Optional[Span]) -> None + """Set current tracing span.""" self._span = span # XXX: this differs from the implementation in JS, there Scope.setSpan # does not set Scope._transactionName. 
diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 5aa4cc3c7f..35b3bdf877 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -1364,7 +1364,7 @@ def sampled(self): @sampled.setter def sampled(self, value): - # type: () -> Optional[bool] + # type: (Optional[bool]) -> None pass @property From 433136267d28e7c6d24ba97456a1fd465200234d Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Wed, 28 Aug 2024 14:43:11 +0200 Subject: [PATCH 020/244] Remove the _active flag from POTelSpan (#3470) --- sentry_sdk/tracing.py | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 35b3bdf877..ee30bc2cdb 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -1194,7 +1194,6 @@ class POTelSpan: def __init__( self, *, - active=True, # type: bool op=None, # type: Optional[str] description=None, # type: Optional[str] status=None, # type: Optional[str] @@ -1221,7 +1220,6 @@ def __init__( self._otel_span = tracer.start_span( description or op or "", start_time=start_timestamp ) # XXX - self._active = active self.origin = origin or DEFAULT_SPAN_ORIGIN self.op = op @@ -1250,10 +1248,9 @@ def __enter__(self): # XXX use_span? https://github.com/open-telemetry/opentelemetry-python/blob/3836da8543ce9751051e38a110c0468724042e62/opentelemetry-api/src/opentelemetry/trace/__init__.py#L547 # # create a Context object with parent set as current span - if self._active: - ctx = otel_trace.set_span_in_context(self._otel_span) - # set as the implicit current context - self._ctx_token = context.attach(ctx) + ctx = otel_trace.set_span_in_context(self._otel_span) + # set as the implicit current context + self._ctx_token = context.attach(ctx) return self @@ -1261,8 +1258,7 @@ def __exit__(self, ty, value, tb): # type: (Optional[Any], Optional[Any], Optional[Any]) -> None self._otel_span.end() # XXX set status to error if unset and an exception occurred? - if self._active: - context.detach(self._ctx_token) + context.detach(self._ctx_token) @property def description(self): From 268a524e51fdaf7909e5537518eb8d5c921951ef Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Wed, 28 Aug 2024 14:43:22 +0200 Subject: [PATCH 021/244] Add OTelIntegration to DEFAULT_INTEGRATIONS (#3471) --- sentry_sdk/client.py | 14 +------------- sentry_sdk/integrations/__init__.py | 1 + 2 files changed, 2 insertions(+), 13 deletions(-) diff --git a/sentry_sdk/client.py b/sentry_sdk/client.py index 2c99394cd1..03ed439b71 100644 --- a/sentry_sdk/client.py +++ b/sentry_sdk/client.py @@ -30,7 +30,7 @@ VERSION, ClientConstructor, ) -from sentry_sdk.integrations import _DEFAULT_INTEGRATIONS, setup_integrations +from sentry_sdk.integrations import setup_integrations from sentry_sdk.sessions import SessionFlusher from sentry_sdk.envelope import Envelope from sentry_sdk.profiler.continuous_profiler import setup_continuous_profiler @@ -345,18 +345,6 @@ def _capture_envelope(envelope): ) ) - if self.options["_experiments"].get("otel_powered_performance", False): - logger.debug( - "[OTel] Enabling experimental OTel-powered performance monitoring." 
- ) - if ( - "sentry_sdk.integrations.opentelemetry.integration.OpenTelemetryIntegration" - not in _DEFAULT_INTEGRATIONS - ): - _DEFAULT_INTEGRATIONS.append( - "sentry_sdk.integrations.opentelemetry.integration.OpenTelemetryIntegration", - ) - self.integrations = setup_integrations( self.options["integrations"], with_defaults=self.options["default_integrations"], diff --git a/sentry_sdk/integrations/__init__.py b/sentry_sdk/integrations/__init__.py index 35f809bde7..9fef954939 100644 --- a/sentry_sdk/integrations/__init__.py +++ b/sentry_sdk/integrations/__init__.py @@ -65,6 +65,7 @@ def iter_default_integrations(with_auto_enabling_integrations): "sentry_sdk.integrations.excepthook.ExcepthookIntegration", "sentry_sdk.integrations.logging.LoggingIntegration", "sentry_sdk.integrations.modules.ModulesIntegration", + "sentry_sdk.integrations.opentelemetry.integration.OpenTelemetryIntegration", "sentry_sdk.integrations.stdlib.StdlibIntegration", "sentry_sdk.integrations.threading.ThreadingIntegration", ] From f980b97ed221e1cbb7e201a61bfa2f45729a9f33 Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Thu, 29 Aug 2024 09:41:13 +0200 Subject: [PATCH 022/244] Set span on scope (#3472) --- sentry_sdk/scope.py | 63 ++++++++++++++++++++++++------------------- sentry_sdk/tracing.py | 5 +++- 2 files changed, 40 insertions(+), 28 deletions(-) diff --git a/sentry_sdk/scope.py b/sentry_sdk/scope.py index 20f8e95325..c19a0e523f 100644 --- a/sentry_sdk/scope.py +++ b/sentry_sdk/scope.py @@ -696,19 +696,13 @@ def fingerprint(self, value): def transaction(self): # type: () -> Any # would be type: () -> Optional[Transaction], see https://github.com/python/mypy/issues/3004 - """Return the transaction (root span) in the scope, if any.""" - - # there is no span/transaction on the scope - if self._span is None: - return None - - # there is an orphan span on the scope - if self._span.containing_transaction is None: - return None + """ + Return the transaction (root span) in the scope, if any. - # there is either a transaction (which is its own containing - # transaction) or a non-orphan span on the scope - return self._span.containing_transaction + .. deprecated:: 3.0.0 + This property is deprecated. Use root_span instead. + """ + return self.root_span @transaction.setter def transaction(self, value): @@ -735,6 +729,22 @@ def transaction(self, value): if self._span and self._span.containing_transaction: self._span.containing_transaction.name = value + @property + def root_span(self): + # type: () -> POTelSpan + """Return the root span in the scope, if any.""" + + # there is no span on the scope + if self._span is None: + return None + + # this is a root span + if self._span.root_span is None: + return self._span + + # get the topmost parent + return self._span.root_span + def set_transaction_name(self, name, source=None): # type: (str, Optional[str]) -> None """Set the transaction name and optionally the transaction source.""" @@ -1014,25 +1024,24 @@ def start_span(self, span=None, custom_sampling_context=None, **kwargs): For supported `**kwargs` see :py:class:`sentry_sdk.tracing.Span`. 
""" - with new_scope(): - kwargs.setdefault("scope", self) + kwargs.setdefault("scope", self) - # get current span or transaction - span = span or self.span or self.get_isolation_scope().span + # get current span or transaction + span = span or self.span or self.get_isolation_scope().span - if span is None: - # New spans get the `trace_id` from the scope - if "trace_id" not in kwargs: - propagation_context = self.get_active_propagation_context() - if propagation_context is not None: - kwargs["trace_id"] = propagation_context.trace_id + if span is None: + # New spans get the `trace_id` from the scope + if "trace_id" not in kwargs: + propagation_context = self.get_active_propagation_context() + if propagation_context is not None: + kwargs["trace_id"] = propagation_context.trace_id - span = POTelSpan(**kwargs) - else: - # Children take `trace_id`` from the parent span. - span = span.start_child(**kwargs) + span = POTelSpan(**kwargs) + else: + # Children take `trace_id`` from the parent span. + span = span.start_child(**kwargs) - return span + return span def continue_trace( self, environ_or_headers, op=None, name=None, source=None, origin=None diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index ee30bc2cdb..6496327834 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -1252,6 +1252,10 @@ def __enter__(self): # set as the implicit current context self._ctx_token = context.attach(ctx) + # get the new scope that was forked on context.attach + self.scope = sentry_sdk.get_current_scope() + self.scope.span = self + return self def __exit__(self, ty, value, tb): @@ -1319,7 +1323,6 @@ def root_span(self): parent = None while True: - # XXX test if this actually works if self._otel_span.parent: parent = self._otel_span.parent else: From c74a947b26402ad3c7861e66b8647e75bb8099e8 Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Thu, 29 Aug 2024 14:20:41 +0200 Subject: [PATCH 023/244] fix integrations tests --- tests/conftest.py | 10 ++-------- 1 file changed, 2 insertions(+), 8 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 514dffafb0..c7ade0bcdc 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -24,7 +24,6 @@ import sentry_sdk.utils from sentry_sdk.envelope import Envelope from sentry_sdk.integrations import ( # noqa: F401 - _DEFAULT_INTEGRATIONS, _installed_integrations, _processed_integrations, ) @@ -177,13 +176,8 @@ def reset_integrations(): with a clean slate to ensure monkeypatching works well, but this also means some other stuff will be monkeypatched twice. 
""" - global _DEFAULT_INTEGRATIONS, _processed_integrations - try: - _DEFAULT_INTEGRATIONS.remove( - "sentry_sdk.integrations.opentelemetry.integration.OpenTelemetryIntegration" - ) - except ValueError: - pass + global _installed_integrations, _processed_integrations + _processed_integrations.clear() _installed_integrations.clear() From e573d617dcf34c6cfe1e71319337a287cf448443 Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Thu, 29 Aug 2024 16:00:13 +0200 Subject: [PATCH 024/244] Fix some properties (#3474) * fix some properties * remove parent --- .../integrations/opentelemetry/consts.py | 1 + sentry_sdk/tracing.py | 37 ++++++++++--------- 2 files changed, 21 insertions(+), 17 deletions(-) diff --git a/sentry_sdk/integrations/opentelemetry/consts.py b/sentry_sdk/integrations/opentelemetry/consts.py index aca364fd54..8af978f8fd 100644 --- a/sentry_sdk/integrations/opentelemetry/consts.py +++ b/sentry_sdk/integrations/opentelemetry/consts.py @@ -21,3 +21,4 @@ class SentrySpanAttribute: ORIGIN = "sentry.origin" MEASUREMENT = "sentry.measurement" TAG = "sentry.tag" + NAME = "sentry.name" diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 6496327834..43a17e44ff 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -1200,6 +1200,7 @@ def __init__( scope=None, # type: Optional[Scope] start_timestamp=None, # type: Optional[Union[datetime, float]] origin=None, # type: Optional[str] + name=None, # type: Optional[str] **_, # type: dict[str, object] ): # type: (...) -> None @@ -1224,6 +1225,7 @@ def __init__( self.origin = origin or DEFAULT_SPAN_ORIGIN self.op = op self.description = description + self.name = name if status is not None: self.set_status(status) @@ -1303,7 +1305,9 @@ def containing_transaction(self): .. deprecated:: 3.0.0 This will be removed in the future. Use :func:`root_span` instead. """ - logger.warning("Deprecated: This will be removed in the future.") + logger.warning( + "Deprecated: This will be removed in the future. Use root_span instead." + ) return self.root_span @containing_transaction.setter @@ -1318,17 +1322,11 @@ def containing_transaction(self, value): @property def root_span(self): - if isinstance(self._otel_span, otel_trace.NonRecordingSpan): - return None - - parent = None - while True: - if self._otel_span.parent: - parent = self._otel_span.parent - else: - break - - return parent + # type: () -> Optional[POTelSpan] + # XXX implement this + # there's a span.parent property, but it returns the parent spancontext + # not sure if there's a way to retrieve the parent with pure otel. 
+ return None @root_span.setter def root_span(self, value): @@ -1371,7 +1369,7 @@ def op(self): # type: () -> Optional[str] from sentry_sdk.integrations.opentelemetry.consts import SentrySpanAttribute - self._otel_span.attributes.get(SentrySpanAttribute.OP) + return self._otel_span.attributes.get(SentrySpanAttribute.OP) @op.setter def op(self, value): @@ -1383,13 +1381,18 @@ def op(self, value): @property def name(self): - # type: () -> str - pass + # type: () -> Optional[str] + from sentry_sdk.integrations.opentelemetry.consts import SentrySpanAttribute + + return self._otel_span.attributes.get(SentrySpanAttribute.NAME) @name.setter def name(self, value): - # type: (str) -> None - pass + # type: (Optional[str]) -> None + from sentry_sdk.integrations.opentelemetry.consts import SentrySpanAttribute + + if value is not None: + self._otel_span.set_attribute(SentrySpanAttribute.NAME, value) @property def source(self): From 3d8cc7c6111dfa5cb8964a14983ad530917c2748 Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Fri, 30 Aug 2024 09:53:03 +0200 Subject: [PATCH 025/244] Add note to mig guide --- MIGRATION_GUIDE.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index deab7ae038..9f802d1933 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -27,6 +27,8 @@ Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of wh - The context manager `auto_session_tracking_scope()` has been removed. Use `track_session()` instead. - Utility function `is_auto_session_tracking_enabled()` has been removed. There is no public replacement. There is a private `_is_auto_session_tracking_enabled()` (if you absolutely need this function) It accepts a `scope` parameter instead of the previously used `hub` parameter. - Utility function `is_auto_session_tracking_enabled_scope()` has been removed. There is no public replacement. There is a private `_is_auto_session_tracking_enabled()` (if you absolutely need this function) +- Setting `scope.level` has been removed. Use `scope.set_level` instead. + ### Deprecated From 2e2be3bd0329936d2ff146834abfc1c5ffbefeac Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Fri, 30 Aug 2024 13:56:45 +0200 Subject: [PATCH 026/244] Set the correct SpanContext in continue_trace (#3475) --- sentry_sdk/api.py | 3 +- .../opentelemetry/potel_span_processor.py | 3 +- .../integrations/opentelemetry/scope.py | 43 +++++++++++++++++-- sentry_sdk/tracing.py | 9 ++-- 4 files changed, 47 insertions(+), 11 deletions(-) diff --git a/sentry_sdk/api.py b/sentry_sdk/api.py index e724a3b317..db0ce275fd 100644 --- a/sentry_sdk/api.py +++ b/sentry_sdk/api.py @@ -237,7 +237,6 @@ def flush( def start_span( *, - span=None, custom_sampling_context=None, **kwargs, # type: Any ): @@ -255,7 +254,7 @@ def start_span( method. """ # TODO: Consider adding type hints to the method signature. 
- return get_current_scope().start_span(span, custom_sampling_context, **kwargs) + return get_current_scope().start_span(custom_sampling_context, **kwargs) def start_transaction( diff --git a/sentry_sdk/integrations/opentelemetry/potel_span_processor.py b/sentry_sdk/integrations/opentelemetry/potel_span_processor.py index db4c1f58d6..06376ec3e6 100644 --- a/sentry_sdk/integrations/opentelemetry/potel_span_processor.py +++ b/sentry_sdk/integrations/opentelemetry/potel_span_processor.py @@ -51,8 +51,7 @@ def on_end(self, span): if is_sentry_span(span): return - # TODO-neel-potel-remote only take parent if not remote - if span.parent: + if span.parent and not span.parent.is_remote: self._children_spans[span.parent.span_id].append(span) else: # if have a root span ending, we build a transaction and send it diff --git a/sentry_sdk/integrations/opentelemetry/scope.py b/sentry_sdk/integrations/opentelemetry/scope.py index 50a7e45b01..01bf693611 100644 --- a/sentry_sdk/integrations/opentelemetry/scope.py +++ b/sentry_sdk/integrations/opentelemetry/scope.py @@ -2,8 +2,10 @@ from contextlib import contextmanager from opentelemetry.context import get_value, set_value, attach, detach, get_current +from opentelemetry.trace import SpanContext, NonRecordingSpan, TraceFlags, use_span from sentry_sdk.scope import Scope, ScopeType +from sentry_sdk.tracing import POTelSpan from sentry_sdk.integrations.opentelemetry.consts import ( SENTRY_SCOPES_KEY, SENTRY_FORK_ISOLATION_SCOPE_KEY, @@ -14,6 +16,8 @@ if TYPE_CHECKING: from typing import Tuple, Optional, Generator, Dict, Any + from sentry_sdk._types import SamplingContext + class PotelScope(Scope): @classmethod @@ -61,10 +65,43 @@ def _get_isolation_scope(cls): @contextmanager def continue_trace(self, environ_or_headers): # type: (Dict[str, Any]) -> Generator[None, None, None] - with new_scope() as scope: - scope.generate_propagation_context(environ_or_headers) - # TODO-neel-potel add remote span on context + self.generate_propagation_context(environ_or_headers) + + span_context = self._incoming_otel_span_context() + if span_context is None: yield + else: + with use_span(NonRecordingSpan(span_context)): + yield + + def _incoming_otel_span_context(self): + # type: () -> Optional[SpanContext] + if self._propagation_context is None: + return None + # If sentry-trace extraction didn't have a parent_span_id, we don't have an upstream header + if self._propagation_context.parent_span_id is None: + return None + + trace_flags = TraceFlags( + TraceFlags.SAMPLED + if self._propagation_context.parent_sampled + else TraceFlags.DEFAULT + ) + + # TODO-neel-potel tracestate + span_context = SpanContext( + trace_id=int(self._propagation_context.trace_id, 16), # type: ignore + span_id=int(self._propagation_context.parent_span_id, 16), # type: ignore + is_remote=True, + trace_flags=trace_flags, + ) + + return span_context + + def start_span(self, custom_sampling_context=None, **kwargs): + # type: (Optional[SamplingContext], Any) -> POTelSpan + # TODO-neel-potel ideally want to remove the span argument, discuss with ivana + return POTelSpan(**kwargs, scope=self) _INITIAL_CURRENT_SCOPE = PotelScope(ty=ScopeType.CURRENT) diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 43a17e44ff..66a3a7b7c7 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -3,6 +3,7 @@ from datetime import datetime, timedelta, timezone from opentelemetry import trace as otel_trace, context +from opentelemetry.trace import format_trace_id, format_span_id from 
opentelemetry.trace.status import StatusCode import sentry_sdk @@ -1346,13 +1347,13 @@ def parent_span_id(self): @property def trace_id(self): - # type: () -> Optional[str] - return self._otel_span.get_span_context().trace_id + # type: () -> str + return format_trace_id(self._otel_span.get_span_context().trace_id) @property def span_id(self): - # type: () -> Optional[str] - return self._otel_span.get_span_context().span_id + # type: () -> str + return format_span_id(self._otel_span.get_span_context().span_id) @property def sampled(self): From 963afefa85a7dede702df9973931053b7655cd9c Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Fri, 30 Aug 2024 13:58:03 +0200 Subject: [PATCH 027/244] Add migration line for start_span no span arg --- MIGRATION_GUIDE.md | 1 + 1 file changed, 1 insertion(+) diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index 9f802d1933..a587a6b827 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -11,6 +11,7 @@ Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of wh - The SDK now supports Python 3.7 and higher. - `sentry_sdk.start_span` now only takes keyword arguments. +- `sentry_sdk.start_span` no longer takes an explicit `span` argument. - The `Span()` constructor does not accept a `hub` parameter anymore. - `Span.finish()` does not accept a `hub` parameter anymore. - The `Profile()` constructor does not accept a `hub` parameter anymore. From 7c4a3a09875c5f90c3a376beadd3520931bce266 Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Fri, 30 Aug 2024 14:48:39 +0200 Subject: [PATCH 028/244] Restore original scope (#3485) * wip * cleanup * . --- .../integrations/opentelemetry/scope.py | 17 +++- sentry_sdk/scope.py | 80 +++++++++---------- 2 files changed, 49 insertions(+), 48 deletions(-) diff --git a/sentry_sdk/integrations/opentelemetry/scope.py b/sentry_sdk/integrations/opentelemetry/scope.py index 01bf693611..1d03d67c35 100644 --- a/sentry_sdk/integrations/opentelemetry/scope.py +++ b/sentry_sdk/integrations/opentelemetry/scope.py @@ -4,19 +4,20 @@ from opentelemetry.context import get_value, set_value, attach, detach, get_current from opentelemetry.trace import SpanContext, NonRecordingSpan, TraceFlags, use_span -from sentry_sdk.scope import Scope, ScopeType -from sentry_sdk.tracing import POTelSpan from sentry_sdk.integrations.opentelemetry.consts import ( SENTRY_SCOPES_KEY, SENTRY_FORK_ISOLATION_SCOPE_KEY, ) - +from sentry_sdk.scope import Scope, ScopeType +from sentry_sdk.tracing import POTelSpan from sentry_sdk._types import TYPE_CHECKING if TYPE_CHECKING: from typing import Tuple, Optional, Generator, Dict, Any + from typing_extensions import Unpack from sentry_sdk._types import SamplingContext + from sentry_sdk.tracing import TransactionKwargs class PotelScope(Scope): @@ -98,9 +99,17 @@ def _incoming_otel_span_context(self): return span_context + def start_transaction(self, custom_sampling_context=None, **kwargs): + # type: (Optional[SamplingContext], Unpack[TransactionKwargs]) -> POTelSpan + """ + .. deprecated:: 3.0.0 + This function is deprecated and will be removed in a future release. + Use :py:meth:`sentry_sdk.start_span` instead. 
+ """ + return self.start_span(custom_sampling_context=custom_sampling_context) + def start_span(self, custom_sampling_context=None, **kwargs): # type: (Optional[SamplingContext], Any) -> POTelSpan - # TODO-neel-potel ideally want to remove the span argument, discuss with ivana return POTelSpan(**kwargs, scope=self) diff --git a/sentry_sdk/scope.py b/sentry_sdk/scope.py index 783a099b98..688aff9d52 100644 --- a/sentry_sdk/scope.py +++ b/sentry_sdk/scope.py @@ -25,7 +25,6 @@ NoOpSpan, Span, Transaction, - POTelSpan, ) from sentry_sdk.utils import ( capture_internal_exception, @@ -696,13 +695,18 @@ def fingerprint(self, value): def transaction(self): # type: () -> Any # would be type: () -> Optional[Transaction], see https://github.com/python/mypy/issues/3004 - """ - Return the transaction (root span) in the scope, if any. + """Return the transaction (root span) in the scope, if any.""" - .. deprecated:: 3.0.0 - This property is deprecated. Use root_span instead. - """ - return self.root_span + # there is no span/transaction on the scope + if self._span is None: + return None + + # there is an orphan span on the scope + if self._span.containing_transaction is None: + return None + # there is either a transaction (which is its own containing + # transaction) or a non-orphan span on the scope + return self._span.containing_transaction @transaction.setter def transaction(self, value): @@ -729,22 +733,6 @@ def transaction(self, value): if self._span and self._span.containing_transaction: self._span.containing_transaction.name = value - @property - def root_span(self): - # type: () -> POTelSpan - """Return the root span in the scope, if any.""" - - # there is no span on the scope - if self._span is None: - return None - - # this is a root span - if self._span.root_span is None: - return self._span - - # get the topmost parent - return self._span.root_span - def set_transaction_name(self, name, source=None): # type: (str, Optional[str]) -> None """Set the transaction name and optionally the transaction source.""" @@ -953,10 +941,6 @@ def start_transaction( ): # type: (Optional[Transaction], Optional[SamplingContext], Unpack[TransactionKwargs]) -> Union[Transaction, NoOpSpan] """ - .. deprecated:: 3.0.0 - This function is deprecated and will be removed in a future release. - Use :py:meth:`sentry_sdk.start_span` instead. - Start and return a transaction. Start an existing transaction if given, otherwise create and start a new @@ -987,12 +971,14 @@ def start_transaction( """ kwargs.setdefault("scope", self) + client = self.get_client() + try_autostart_continuous_profiler() custom_sampling_context = custom_sampling_context or {} # if we haven't been given a transaction, make one - transaction = transaction or POTelSpan(**kwargs) + transaction = Transaction(**kwargs) # use traces_sample_rate, traces_sampler, and/or inheritance to make a # sampling decision @@ -1011,10 +997,15 @@ def start_transaction( transaction._profile = profile + # we don't bother to keep spans if we already know we're not going to + # send the transaction + max_spans = (client.options["_experiments"].get("max_spans")) or 1000 + transaction.init_span_recorder(maxlen=max_spans) + return transaction - def start_span(self, span=None, custom_sampling_context=None, **kwargs): - # type: (Optional[Span], Optional[SamplingContext], Any) -> Span + def start_span(self, **kwargs): + # type: (Optional[Span], Any) -> Span """ Start a span whose parent is the currently active span, if any. 
@@ -1024,24 +1015,25 @@ def start_span(self, span=None, custom_sampling_context=None, **kwargs): For supported `**kwargs` see :py:class:`sentry_sdk.tracing.Span`. """ - kwargs.setdefault("scope", self) + with new_scope(): + kwargs.setdefault("scope", self) - # get current span or transaction - span = span or self.span or self.get_isolation_scope().span + # get current span or transaction + span = self.span or self.get_isolation_scope().span - if span is None: - # New spans get the `trace_id` from the scope - if "trace_id" not in kwargs: - propagation_context = self.get_active_propagation_context() - if propagation_context is not None: - kwargs["trace_id"] = propagation_context.trace_id + if span is None: + # New spans get the `trace_id` from the scope + if "trace_id" not in kwargs: + propagation_context = self.get_active_propagation_context() + if propagation_context is not None: + kwargs["trace_id"] = propagation_context.trace_id - span = POTelSpan(**kwargs) - else: - # Children take `trace_id`` from the parent span. - span = span.start_child(**kwargs) + span = Span(**kwargs) + else: + # Children take `trace_id`` from the parent span. + span = span.start_child(**kwargs) - return span + return span def continue_trace( self, environ_or_headers, op=None, name=None, source=None, origin=None From 245195e39481f07259b0672057899984ebe0218c Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Mon, 2 Sep 2024 16:01:05 +0200 Subject: [PATCH 029/244] Update integrations with new continue_trace callback usage (#3486) * Update integrations with new continue_trace callback usage * aiohttp * asgi * aws_lambda * celery * gcp * huey * rq * sanic - this uses manual enter/exit on the contextmanager so we'll have to test properly * tornado * ray --- sentry_sdk/integrations/_asgi_common.py | 2 +- sentry_sdk/integrations/aiohttp.py | 54 ++++++------ sentry_sdk/integrations/asgi.py | 95 +++++++++------------- sentry_sdk/integrations/aws_lambda.py | 54 ++++++------ sentry_sdk/integrations/celery/__init__.py | 44 ++++------ sentry_sdk/integrations/gcp.py | 71 ++++++++-------- sentry_sdk/integrations/huey.py | 25 +++--- sentry_sdk/integrations/ray.py | 36 ++++---- sentry_sdk/integrations/rq.py | 29 +++---- sentry_sdk/integrations/sanic.py | 13 +-- sentry_sdk/integrations/tornado.py | 30 +++---- sentry_sdk/integrations/wsgi.py | 3 +- 12 files changed, 200 insertions(+), 256 deletions(-) diff --git a/sentry_sdk/integrations/_asgi_common.py b/sentry_sdk/integrations/_asgi_common.py index c16bbbcfe8..ca030d6f45 100644 --- a/sentry_sdk/integrations/_asgi_common.py +++ b/sentry_sdk/integrations/_asgi_common.py @@ -21,7 +21,7 @@ def _get_headers(asgi_scope): Extract headers from the ASGI scope, in the format that the Sentry protocol expects. 
""" headers = {} # type: Dict[str, str] - for raw_key, raw_value in asgi_scope["headers"]: + for raw_key, raw_value in asgi_scope.get("headers", {}): key = raw_key.decode("latin-1") value = raw_value.decode("latin-1") if key in headers: diff --git a/sentry_sdk/integrations/aiohttp.py b/sentry_sdk/integrations/aiohttp.py index 33f2fc095c..307f71fee3 100644 --- a/sentry_sdk/integrations/aiohttp.py +++ b/sentry_sdk/integrations/aiohttp.py @@ -2,7 +2,6 @@ import weakref import sentry_sdk -from sentry_sdk.api import continue_trace from sentry_sdk.consts import OP, SPANSTATUS, SPANDATA from sentry_sdk.integrations import Integration, DidNotEnable from sentry_sdk.integrations.logging import ignore_logger @@ -113,34 +112,31 @@ async def sentry_app_handle(self, request, *args, **kwargs): scope.add_event_processor(_make_request_processor(weak_request)) headers = dict(request.headers) - transaction = continue_trace( - headers, - op=OP.HTTP_SERVER, - # If this transaction name makes it to the UI, AIOHTTP's - # URL resolver did not find a route or died trying. - name="generic AIOHTTP request", - source=TRANSACTION_SOURCE_ROUTE, - origin=AioHttpIntegration.origin, - ) - with sentry_sdk.start_transaction( - transaction, - custom_sampling_context={"aiohttp_request": request}, - ): - try: - response = await old_handle(self, request) - except HTTPException as e: - transaction.set_http_status(e.status_code) - raise - except (asyncio.CancelledError, ConnectionResetError): - transaction.set_status(SPANSTATUS.CANCELLED) - raise - except Exception: - # This will probably map to a 500 but seems like we - # have no way to tell. Do not set span status. - reraise(*_capture_exception()) - - transaction.set_http_status(response.status) - return response + with sentry_sdk.continue_trace(headers): + with sentry_sdk.start_transaction( + op=OP.HTTP_SERVER, + # If this transaction name makes it to the UI, AIOHTTP's + # URL resolver did not find a route or died trying. + name="generic AIOHTTP request", + source=TRANSACTION_SOURCE_ROUTE, + origin=AioHttpIntegration.origin, + custom_sampling_context={"aiohttp_request": request}, + ) as transaction: + try: + response = await old_handle(self, request) + except HTTPException as e: + transaction.set_http_status(e.status_code) + raise + except (asyncio.CancelledError, ConnectionResetError): + transaction.set_status(SPANSTATUS.CANCELLED) + raise + except Exception: + # This will probably map to a 500 but seems like we + # have no way to tell. Do not set span status. 
+ reraise(*_capture_exception()) + + transaction.set_http_status(response.status) + return response Application._handle = sentry_app_handle diff --git a/sentry_sdk/integrations/asgi.py b/sentry_sdk/integrations/asgi.py index 474a4a40cd..8aac3226d6 100644 --- a/sentry_sdk/integrations/asgi.py +++ b/sentry_sdk/integrations/asgi.py @@ -10,7 +10,6 @@ from functools import partial import sentry_sdk -from sentry_sdk.api import continue_trace from sentry_sdk.consts import OP from sentry_sdk.integrations._asgi_common import ( @@ -34,7 +33,6 @@ transaction_from_function, _get_installed_modules, ) -from sentry_sdk.tracing import Transaction from typing import TYPE_CHECKING @@ -185,66 +183,47 @@ async def _run_app(self, scope, receive, send, asgi_version): scope, ) - if ty in ("http", "websocket"): - transaction = continue_trace( - _get_headers(scope), - op="{}.server".format(ty), + with sentry_sdk.continue_trace(_get_headers(scope)): + with sentry_sdk.start_transaction( + op=( + OP.WEBSOCKET_SERVER + if ty == "websocket" + else OP.HTTP_SERVER + ), name=transaction_name, source=transaction_source, origin=self.span_origin, - ) - logger.debug( - "[ASGI] Created transaction (continuing trace): %s", - transaction, - ) - else: - transaction = Transaction( - op=OP.HTTP_SERVER, - name=transaction_name, - source=transaction_source, - origin=self.span_origin, - ) - logger.debug( - "[ASGI] Created transaction (new): %s", transaction - ) - - transaction.set_tag("asgi.type", ty) - logger.debug( - "[ASGI] Set transaction name and source on transaction: '%s' / '%s'", - transaction.name, - transaction.source, - ) - - with sentry_sdk.start_transaction( - transaction, - custom_sampling_context={"asgi_scope": scope}, - ): - logger.debug("[ASGI] Started transaction: %s", transaction) - try: - - async def _sentry_wrapped_send(event): - # type: (Dict[str, Any]) -> Any - is_http_response = ( - event.get("type") == "http.response.start" - and transaction is not None - and "status" in event - ) - if is_http_response: - transaction.set_http_status(event["status"]) - - return await send(event) - - if asgi_version == 2: - return await self.app(scope)( - receive, _sentry_wrapped_send - ) - else: - return await self.app( - scope, receive, _sentry_wrapped_send + custom_sampling_context={"asgi_scope": scope}, + ) as transaction: + logger.debug("[ASGI] Started transaction: %s", transaction) + transaction.set_tag("asgi.type", ty) + try: + + async def _sentry_wrapped_send(event): + # type: (Dict[str, Any]) -> Any + is_http_response = ( + event.get("type") == "http.response.start" + and transaction is not None + and "status" in event + ) + if is_http_response: + transaction.set_http_status(event["status"]) + + return await send(event) + + if asgi_version == 2: + return await self.app(scope)( + receive, _sentry_wrapped_send + ) + else: + return await self.app( + scope, receive, _sentry_wrapped_send + ) + except Exception as exc: + _capture_exception( + exc, mechanism_type=self.mechanism_type ) - except Exception as exc: - _capture_exception(exc, mechanism_type=self.mechanism_type) - raise exc from None + raise exc from None finally: _asgi_middleware_applied.set(False) diff --git a/sentry_sdk/integrations/aws_lambda.py b/sentry_sdk/integrations/aws_lambda.py index 168b8061aa..a090662608 100644 --- a/sentry_sdk/integrations/aws_lambda.py +++ b/sentry_sdk/integrations/aws_lambda.py @@ -4,7 +4,6 @@ from os import environ import sentry_sdk -from sentry_sdk.api import continue_trace from sentry_sdk.consts import OP from sentry_sdk.scope 
import should_send_default_pii from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT @@ -135,34 +134,31 @@ def sentry_handler(aws_event, aws_context, *args, **kwargs): if not isinstance(headers, dict): headers = {} - transaction = continue_trace( - headers, - op=OP.FUNCTION_AWS, - name=aws_context.function_name, - source=TRANSACTION_SOURCE_COMPONENT, - origin=AwsLambdaIntegration.origin, - ) - with sentry_sdk.start_transaction( - transaction, - custom_sampling_context={ - "aws_event": aws_event, - "aws_context": aws_context, - }, - ): - try: - return handler(aws_event, aws_context, *args, **kwargs) - except Exception: - exc_info = sys.exc_info() - sentry_event, hint = event_from_exception( - exc_info, - client_options=client.options, - mechanism={"type": "aws_lambda", "handled": False}, - ) - sentry_sdk.capture_event(sentry_event, hint=hint) - reraise(*exc_info) - finally: - if timeout_thread: - timeout_thread.stop() + with sentry_sdk.continue_trace(headers): + with sentry_sdk.start_transaction( + op=OP.FUNCTION_AWS, + name=aws_context.function_name, + source=TRANSACTION_SOURCE_COMPONENT, + origin=AwsLambdaIntegration.origin, + custom_sampling_context={ + "aws_event": aws_event, + "aws_context": aws_context, + }, + ): + try: + return handler(aws_event, aws_context, *args, **kwargs) + except Exception: + exc_info = sys.exc_info() + sentry_event, hint = event_from_exception( + exc_info, + client_options=client.options, + mechanism={"type": "aws_lambda", "handled": False}, + ) + sentry_sdk.capture_event(sentry_event, hint=hint) + reraise(*exc_info) + finally: + if timeout_thread: + timeout_thread.stop() return sentry_handler # type: ignore diff --git a/sentry_sdk/integrations/celery/__init__.py b/sentry_sdk/integrations/celery/__init__.py index 5b8a90fdb9..72483209e3 100644 --- a/sentry_sdk/integrations/celery/__init__.py +++ b/sentry_sdk/integrations/celery/__init__.py @@ -4,7 +4,6 @@ import sentry_sdk from sentry_sdk import isolation_scope -from sentry_sdk.api import continue_trace from sentry_sdk.consts import OP, SPANSTATUS, SPANDATA from sentry_sdk.integrations import Integration, DidNotEnable from sentry_sdk.integrations.celery.beat import ( @@ -301,38 +300,27 @@ def _inner(*args, **kwargs): scope.clear_breadcrumbs() scope.add_event_processor(_make_event_processor(task, *args, **kwargs)) - transaction = None - # Celery task objects are not a thing to be trusted. Even # something such as attribute access can fail. 
- with capture_internal_exceptions(): - headers = args[3].get("headers") or {} - transaction = continue_trace( - headers, + headers = args[3].get("headers") or {} + with sentry_sdk.continue_trace(headers): + with sentry_sdk.start_transaction( op=OP.QUEUE_TASK_CELERY, - name="unknown celery task", + name=task.name, source=TRANSACTION_SOURCE_TASK, origin=CeleryIntegration.origin, - ) - transaction.name = task.name - transaction.set_status(SPANSTATUS.OK) - - if transaction is None: - return f(*args, **kwargs) - - with sentry_sdk.start_transaction( - transaction, - custom_sampling_context={ - "celery_job": { - "task": task.name, - # for some reason, args[1] is a list if non-empty but a - # tuple if empty - "args": list(args[1]), - "kwargs": args[2], - } - }, - ): - return f(*args, **kwargs) + custom_sampling_context={ + "celery_job": { + "task": task.name, + # for some reason, args[1] is a list if non-empty but a + # tuple if empty + "args": list(args[1]), + "kwargs": args[2], + } + }, + ) as transaction: + transaction.set_status(SPANSTATUS.OK) + return f(*args, **kwargs) return _inner # type: ignore diff --git a/sentry_sdk/integrations/gcp.py b/sentry_sdk/integrations/gcp.py index 688d0de4d4..b977826516 100644 --- a/sentry_sdk/integrations/gcp.py +++ b/sentry_sdk/integrations/gcp.py @@ -4,7 +4,6 @@ from os import environ import sentry_sdk -from sentry_sdk.api import continue_trace from sentry_sdk.consts import OP from sentry_sdk.integrations import Integration from sentry_sdk.integrations._wsgi_common import _filter_headers @@ -82,42 +81,40 @@ def sentry_func(functionhandler, gcp_event, *args, **kwargs): if hasattr(gcp_event, "headers"): headers = gcp_event.headers - transaction = continue_trace( - headers, - op=OP.FUNCTION_GCP, - name=environ.get("FUNCTION_NAME", ""), - source=TRANSACTION_SOURCE_COMPONENT, - origin=GcpIntegration.origin, - ) - sampling_context = { - "gcp_env": { - "function_name": environ.get("FUNCTION_NAME"), - "function_entry_point": environ.get("ENTRY_POINT"), - "function_identity": environ.get("FUNCTION_IDENTITY"), - "function_region": environ.get("FUNCTION_REGION"), - "function_project": environ.get("GCP_PROJECT"), - }, - "gcp_event": gcp_event, - } - with sentry_sdk.start_transaction( - transaction, custom_sampling_context=sampling_context - ): - try: - return func(functionhandler, gcp_event, *args, **kwargs) - except Exception: - exc_info = sys.exc_info() - sentry_event, hint = event_from_exception( - exc_info, - client_options=client.options, - mechanism={"type": "gcp", "handled": False}, - ) - sentry_sdk.capture_event(sentry_event, hint=hint) - reraise(*exc_info) - finally: - if timeout_thread: - timeout_thread.stop() - # Flush out the event queue - client.flush() + with sentry_sdk.continue_trace(headers): + sampling_context = { + "gcp_env": { + "function_name": environ.get("FUNCTION_NAME"), + "function_entry_point": environ.get("ENTRY_POINT"), + "function_identity": environ.get("FUNCTION_IDENTITY"), + "function_region": environ.get("FUNCTION_REGION"), + "function_project": environ.get("GCP_PROJECT"), + }, + "gcp_event": gcp_event, + } + with sentry_sdk.start_transaction( + op=OP.FUNCTION_GCP, + name=environ.get("FUNCTION_NAME", ""), + source=TRANSACTION_SOURCE_COMPONENT, + origin=GcpIntegration.origin, + custom_sampling_context=sampling_context, + ): + try: + return func(functionhandler, gcp_event, *args, **kwargs) + except Exception: + exc_info = sys.exc_info() + sentry_event, hint = event_from_exception( + exc_info, + client_options=client.options, + 
mechanism={"type": "gcp", "handled": False}, + ) + sentry_sdk.capture_event(sentry_event, hint=hint) + reraise(*exc_info) + finally: + if timeout_thread: + timeout_thread.stop() + # Flush out the event queue + client.flush() return sentry_func # type: ignore diff --git a/sentry_sdk/integrations/huey.py b/sentry_sdk/integrations/huey.py index 98fab46711..7aa3cbf490 100644 --- a/sentry_sdk/integrations/huey.py +++ b/sentry_sdk/integrations/huey.py @@ -2,7 +2,7 @@ from datetime import datetime import sentry_sdk -from sentry_sdk.api import continue_trace, get_baggage, get_traceparent +from sentry_sdk.api import get_baggage, get_traceparent from sentry_sdk.consts import OP, SPANSTATUS from sentry_sdk.integrations import DidNotEnable, Integration from sentry_sdk.scope import should_send_default_pii @@ -153,22 +153,19 @@ def _sentry_execute(self, task, timestamp=None): scope.clear_breadcrumbs() scope.add_event_processor(_make_event_processor(task)) - sentry_headers = task.kwargs.pop("sentry_headers", None) - - transaction = continue_trace( - sentry_headers or {}, - name=task.name, - op=OP.QUEUE_TASK_HUEY, - source=TRANSACTION_SOURCE_TASK, - origin=HueyIntegration.origin, - ) - transaction.set_status(SPANSTATUS.OK) - if not getattr(task, "_sentry_is_patched", False): task.execute = _wrap_task_execute(task.execute) task._sentry_is_patched = True - with sentry_sdk.start_transaction(transaction): - return old_execute(self, task, timestamp) + sentry_headers = task.kwargs.pop("sentry_headers", {}) + with sentry_sdk.continue_trace(sentry_headers): + with sentry_sdk.start_transaction( + name=task.name, + op=OP.QUEUE_TASK_HUEY, + source=TRANSACTION_SOURCE_TASK, + origin=HueyIntegration.origin, + ) as transaction: + transaction.set_status(SPANSTATUS.OK) + return old_execute(self, task, timestamp) Huey._execute = _sentry_execute diff --git a/sentry_sdk/integrations/ray.py b/sentry_sdk/integrations/ray.py index bafd42c8d6..b18017ee7f 100644 --- a/sentry_sdk/integrations/ray.py +++ b/sentry_sdk/integrations/ray.py @@ -58,25 +58,23 @@ def _f(*f_args, _tracing=None, **f_kwargs): """ _check_sentry_initialized() - transaction = sentry_sdk.continue_trace( - _tracing or {}, - op=OP.QUEUE_TASK_RAY, - name=qualname_from_function(f), - origin=RayIntegration.origin, - source=TRANSACTION_SOURCE_TASK, - ) - - with sentry_sdk.start_transaction(transaction) as transaction: - try: - result = f(*f_args, **f_kwargs) - transaction.set_status(SPANSTATUS.OK) - except Exception: - transaction.set_status(SPANSTATUS.INTERNAL_ERROR) - exc_info = sys.exc_info() - _capture_exception(exc_info) - reraise(*exc_info) - - return result + with sentry_sdk.continue_trace(_tracing or {}): + with sentry_sdk.start_transaction( + op=OP.QUEUE_TASK_RAY, + name=qualname_from_function(f) or "unknown Ray function", + origin=RayIntegration.origin, + source=TRANSACTION_SOURCE_TASK, + ) as transaction: + try: + result = f(*f_args, **f_kwargs) + transaction.set_status(SPANSTATUS.OK) + except Exception: + transaction.set_status(SPANSTATUS.INTERNAL_ERROR) + exc_info = sys.exc_info() + _capture_exception(exc_info) + reraise(*exc_info) + + return result rv = old_remote(_f, *args, *kwargs) old_remote_method = rv.remote diff --git a/sentry_sdk/integrations/rq.py b/sentry_sdk/integrations/rq.py index c0df1c5e53..7e84b15681 100644 --- a/sentry_sdk/integrations/rq.py +++ b/sentry_sdk/integrations/rq.py @@ -2,7 +2,6 @@ import sentry_sdk from sentry_sdk.consts import OP -from sentry_sdk.api import continue_trace from sentry_sdk.integrations import DidNotEnable, 
Integration from sentry_sdk.integrations.logging import ignore_logger from sentry_sdk.tracing import TRANSACTION_SOURCE_TASK @@ -59,22 +58,20 @@ def sentry_patched_perform_job(self, job, *args, **kwargs): scope.clear_breadcrumbs() scope.add_event_processor(_make_event_processor(weakref.ref(job))) - transaction = continue_trace( - job.meta.get("_sentry_trace_headers") or {}, - op=OP.QUEUE_TASK_RQ, - name="unknown RQ task", - source=TRANSACTION_SOURCE_TASK, - origin=RqIntegration.origin, - ) - - with capture_internal_exceptions(): - transaction.name = job.func_name - - with sentry_sdk.start_transaction( - transaction, - custom_sampling_context={"rq_job": job}, + with sentry_sdk.continue_trace( + job.meta.get("_sentry_trace_headers") or {} ): - rv = old_perform_job(self, job, *args, **kwargs) + with sentry_sdk.start_transaction( + op=OP.QUEUE_TASK_RQ, + name="unknown RQ task", + source=TRANSACTION_SOURCE_TASK, + origin=RqIntegration.origin, + custom_sampling_context={"rq_job": job}, + ) as transaction: + with capture_internal_exceptions(): + transaction.name = job.func_name + + rv = old_perform_job(self, job, *args, **kwargs) if self.is_horse: # We're inside of a forked process and RQ is diff --git a/sentry_sdk/integrations/sanic.py b/sentry_sdk/integrations/sanic.py index 408216329f..3dbc556557 100644 --- a/sentry_sdk/integrations/sanic.py +++ b/sentry_sdk/integrations/sanic.py @@ -4,7 +4,6 @@ from urllib.parse import urlsplit import sentry_sdk -from sentry_sdk import continue_trace from sentry_sdk.consts import OP from sentry_sdk.integrations import Integration, DidNotEnable from sentry_sdk.integrations._wsgi_common import RequestExtractor, _filter_headers @@ -193,16 +192,17 @@ async def _context_enter(request): scope.clear_breadcrumbs() scope.add_event_processor(_make_request_processor(weak_request)) - transaction = continue_trace( - dict(request.headers), + # TODO-neel-potel test if this works + request.ctx._sentry_continue_trace = sentry_sdk.continue_trace( + dict(request.headers) + ) + request.ctx._sentry_continue_trace.__enter__() + request.ctx._sentry_transaction = sentry_sdk.start_transaction( op=OP.HTTP_SERVER, # Unless the request results in a 404 error, the name and source will get overwritten in _set_transaction name=request.path, source=TRANSACTION_SOURCE_URL, origin=SanicIntegration.origin, - ) - request.ctx._sentry_transaction = sentry_sdk.start_transaction( - transaction ).__enter__() @@ -227,6 +227,7 @@ async def _context_exit(request, response=None): and response_status not in integration._unsampled_statuses ) request.ctx._sentry_transaction.__exit__(None, None, None) + request.ctx._sentry_continue_trace.__exit__(None, None, None) request.ctx._sentry_scope.__exit__(None, None, None) diff --git a/sentry_sdk/integrations/tornado.py b/sentry_sdk/integrations/tornado.py index f1bd196261..21532fbba5 100644 --- a/sentry_sdk/integrations/tornado.py +++ b/sentry_sdk/integrations/tornado.py @@ -3,7 +3,6 @@ from inspect import iscoroutinefunction import sentry_sdk -from sentry_sdk.api import continue_trace from sentry_sdk.consts import OP from sentry_sdk.scope import should_send_default_pii from sentry_sdk.tracing import ( @@ -115,22 +114,19 @@ def _handle_request_impl(self): processor = _make_event_processor(weak_handler) scope.add_event_processor(processor) - transaction = continue_trace( - headers, - op=OP.HTTP_SERVER, - # Like with all other integrations, this is our - # fallback transaction in case there is no route. 
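Sanic (above) is the one framework here where the context manager cannot simply wrap a function, because request setup and teardown live in separate hooks; the patch therefore enters and exits it manually via `request.ctx`. A sketch of that shape with hypothetical hook names, assuming `sentry_sdk.init()` has run:

import sentry_sdk
from sentry_sdk.consts import OP

def on_request(request):
    # Enter the propagation context and keep both handles for the exit hook.
    request.ctx._sentry_continue_trace = sentry_sdk.continue_trace(dict(request.headers))
    request.ctx._sentry_continue_trace.__enter__()
    request.ctx._sentry_transaction = sentry_sdk.start_transaction(
        op=OP.HTTP_SERVER, name=request.path
    ).__enter__()

def on_response(request, response):
    # Exit in the reverse order of entering.
    request.ctx._sentry_transaction.__exit__(None, None, None)
    request.ctx._sentry_continue_trace.__exit__(None, None, None)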
- # sentry_urldispatcher_resolve is responsible for - # setting a transaction name later. - name="generic Tornado request", - source=TRANSACTION_SOURCE_ROUTE, - origin=TornadoIntegration.origin, - ) - - with sentry_sdk.start_transaction( - transaction, custom_sampling_context={"tornado_request": self.request} - ): - yield + with sentry_sdk.continue_trace(headers): + with sentry_sdk.start_transaction( + op=OP.HTTP_SERVER, + # Like with all other integrations, this is our + # fallback transaction in case there is no route. + # sentry_urldispatcher_resolve is responsible for + # setting a transaction name later. + name="generic Tornado request", + source=TRANSACTION_SOURCE_ROUTE, + origin=TornadoIntegration.origin, + custom_sampling_context={"tornado_request": self.request}, + ): + yield @ensure_integration_enabled(TornadoIntegration) diff --git a/sentry_sdk/integrations/wsgi.py b/sentry_sdk/integrations/wsgi.py index a9bc5cd90a..bfd303235e 100644 --- a/sentry_sdk/integrations/wsgi.py +++ b/sentry_sdk/integrations/wsgi.py @@ -3,7 +3,6 @@ import sentry_sdk from sentry_sdk._werkzeug import get_host, _get_headers -from sentry_sdk.api import continue_trace from sentry_sdk.consts import OP from sentry_sdk.scope import should_send_default_pii from sentry_sdk.integrations._wsgi_common import _filter_headers @@ -92,7 +91,7 @@ def __call__(self, environ, start_response): ) ) - with continue_trace(environ): + with sentry_sdk.continue_trace(environ): with sentry_sdk.start_transaction( op=OP.HTTP_SERVER, name="generic WSGI request", From f430c20e2baa14a19b107ce8a63e2d1423e5b705 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Thu, 5 Sep 2024 13:33:26 +0200 Subject: [PATCH 030/244] Fix _merge_scopes to work with PotelScope (#3499) Fixes event processors such as WSGI request info addition. 
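In practice the fix restores merging of data attached on all three scopes, including event processors registered on the isolation scope (such as the WSGI request info mentioned above). A minimal sketch of the behaviour, assuming `sentry_sdk.init()` and with illustrative tag values:

import sentry_sdk

sentry_sdk.get_global_scope().set_tag("layer", "global")
sentry_sdk.get_isolation_scope().set_tag("request_id", "abc123")
sentry_sdk.get_current_scope().set_tag("step", "checkout")

# The event scope is assembled global -> isolation -> current (see the diff
# below), so all three tags end up on the captured event.
sentry_sdk.capture_message("merged scope data")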
--- sentry_sdk/scope.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/sentry_sdk/scope.py b/sentry_sdk/scope.py index 688aff9d52..251fa4dadc 100644 --- a/sentry_sdk/scope.py +++ b/sentry_sdk/scope.py @@ -335,11 +335,14 @@ def _merge_scopes(self, additional_scope=None, additional_scope_kwargs=None): if additional_scope and additional_scope_kwargs: raise TypeError("cannot provide scope and kwargs") - final_scope = copy(_global_scope) if _global_scope is not None else Scope() + final_scope = self.__class__() final_scope._type = ScopeType.MERGED + global_scope = self.get_global_scope() + final_scope.update_from_scope(global_scope) + isolation_scope = self.get_isolation_scope() - final_scope.update_from_scope(isolation_scope) + final_scope.update_from_scope(self.get_isolation_scope()) current_scope = self.get_current_scope() final_scope.update_from_scope(current_scope) From 2b10d7818096762a0e3c84a8f4ca1a9dc4af443f Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Thu, 5 Sep 2024 13:44:21 +0200 Subject: [PATCH 031/244] Add context-friendly `use_scope`, `use_isolation_scope` (#3500) --- .../integrations/opentelemetry/consts.py | 2 ++ .../opentelemetry/contextvars_context.py | 20 +++++++++++--- .../integrations/opentelemetry/scope.py | 26 +++++++++++++++++++ 3 files changed, 44 insertions(+), 4 deletions(-) diff --git a/sentry_sdk/integrations/opentelemetry/consts.py b/sentry_sdk/integrations/opentelemetry/consts.py index 8af978f8fd..790dac15ec 100644 --- a/sentry_sdk/integrations/opentelemetry/consts.py +++ b/sentry_sdk/integrations/opentelemetry/consts.py @@ -8,6 +8,8 @@ # scope management keys SENTRY_SCOPES_KEY = create_key("sentry_scopes") SENTRY_FORK_ISOLATION_SCOPE_KEY = create_key("sentry_fork_isolation_scope") +SENTRY_USE_CURRENT_SCOPE_KEY = create_key("sentry_use_current_scope") +SENTRY_USE_ISOLATION_SCOPE_KEY = create_key("sentry_use_isolation_scope") OTEL_SENTRY_CONTEXT = "otel" SPAN_ORIGIN = "auto.otel" diff --git a/sentry_sdk/integrations/opentelemetry/contextvars_context.py b/sentry_sdk/integrations/opentelemetry/contextvars_context.py index 86fc253af8..b66b10d18a 100644 --- a/sentry_sdk/integrations/opentelemetry/contextvars_context.py +++ b/sentry_sdk/integrations/opentelemetry/contextvars_context.py @@ -5,6 +5,8 @@ from sentry_sdk.integrations.opentelemetry.consts import ( SENTRY_SCOPES_KEY, SENTRY_FORK_ISOLATION_SCOPE_KEY, + SENTRY_USE_CURRENT_SCOPE_KEY, + SENTRY_USE_ISOLATION_SCOPE_KEY, ) @@ -15,6 +17,8 @@ def attach(self, context): should_fork_isolation_scope = context.pop( SENTRY_FORK_ISOLATION_SCOPE_KEY, False ) + should_use_isolation_scope = context.pop(SENTRY_USE_ISOLATION_SCOPE_KEY, None) + should_use_current_scope = context.pop(SENTRY_USE_CURRENT_SCOPE_KEY, None) if scopes and isinstance(scopes, tuple): (current_scope, isolation_scope) = scopes @@ -22,10 +26,18 @@ def attach(self, context): current_scope = sentry_sdk.get_current_scope() isolation_scope = sentry_sdk.get_isolation_scope() - new_scope = current_scope.fork() - new_isolation_scope = ( - isolation_scope.fork() if should_fork_isolation_scope else isolation_scope - ) + if should_use_current_scope: + new_scope = should_use_current_scope + else: + new_scope = current_scope.fork() + + if should_use_isolation_scope: + new_isolation_scope = should_use_isolation_scope + elif should_fork_isolation_scope: + new_isolation_scope = isolation_scope.fork() + else: + new_isolation_scope = isolation_scope + new_scopes = (new_scope, new_isolation_scope) new_context = set_value(SENTRY_SCOPES_KEY, 
new_scopes, context) diff --git a/sentry_sdk/integrations/opentelemetry/scope.py b/sentry_sdk/integrations/opentelemetry/scope.py index 1d03d67c35..48782875ed 100644 --- a/sentry_sdk/integrations/opentelemetry/scope.py +++ b/sentry_sdk/integrations/opentelemetry/scope.py @@ -7,6 +7,8 @@ from sentry_sdk.integrations.opentelemetry.consts import ( SENTRY_SCOPES_KEY, SENTRY_FORK_ISOLATION_SCOPE_KEY, + SENTRY_USE_CURRENT_SCOPE_KEY, + SENTRY_USE_ISOLATION_SCOPE_KEY, ) from sentry_sdk.scope import Scope, ScopeType from sentry_sdk.tracing import POTelSpan @@ -136,3 +138,27 @@ def new_scope(): yield PotelScope.get_current_scope() finally: detach(token) + + +@contextmanager +def use_scope(scope): + # type: (Scope) -> Generator[Scope, None, None] + context = set_value(SENTRY_USE_CURRENT_SCOPE_KEY, scope) + token = attach(context) + + try: + yield scope + finally: + detach(token) + + +@contextmanager +def use_isolation_scope(isolation_scope): + # type: (Scope) -> Generator[Scope, None, None] + context = set_value(SENTRY_USE_ISOLATION_SCOPE_KEY, isolation_scope) + token = attach(context) + + try: + yield isolation_scope + finally: + detach(token) From 0764220fb499013c15c060db5cd0a5830961e0c9 Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Fri, 6 Sep 2024 10:36:57 +0200 Subject: [PATCH 032/244] remove 3.6-only coverage conf --- .coveragerc36 | 14 -------------- 1 file changed, 14 deletions(-) delete mode 100644 .coveragerc36 diff --git a/.coveragerc36 b/.coveragerc36 deleted file mode 100644 index 8642882ab1..0000000000 --- a/.coveragerc36 +++ /dev/null @@ -1,14 +0,0 @@ -# This is the coverage.py config for Python 3.6 -# The config for newer Python versions is in pyproject.toml. - -[run] -branch = true -omit = - /tmp/* - */tests/* - */.venv/* - - -[report] -exclude_lines = - if TYPE_CHECKING: From b8e687e76b9055fd8f7debffb71c3896e64997b7 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Fri, 6 Sep 2024 15:20:39 +0200 Subject: [PATCH 033/244] Fix some methods in POTelSpan (#3492) * Use `ReadableSpan` checks for attribute and parent access (handles the `NonRecordingSpan` case as a result) * deleted methods `continue_from_environ`, `continue_from_headers` and `from_traceparent` from `POTelSpan`, the new interface will not support them, but we might shim them later to not break code from outside the otel stuff * replaced `continue_from_headers` with `continue_trace` in grpc --- MIGRATION_GUIDE.md | 2 + sentry_sdk/integrations/grpc/aio/server.py | 40 +++++----- sentry_sdk/integrations/grpc/server.py | 26 +++--- sentry_sdk/tracing.py | 92 +++++++--------------- 4 files changed, 63 insertions(+), 97 deletions(-) diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index a587a6b827..f360820dc3 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -29,6 +29,8 @@ Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of wh - Utility function `is_auto_session_tracking_enabled()` has been removed. There is no public replacement. There is a private `_is_auto_session_tracking_enabled()` (if you absolutely need this function) It accepts a `scope` parameter instead of the previously used `hub` parameter. - Utility function `is_auto_session_tracking_enabled_scope()` has been removed. There is no public replacement. There is a private `_is_auto_session_tracking_enabled()` (if you absolutely need this function) - Setting `scope.level` has been removed. Use `scope.set_level` instead. +- `span.containing_transaction` has been removed. Use `span.root_span` instead. 
+- `continue_from_headers`, `continue_from_environ` and `from_traceparent` have been removed, please use top-level API `sentry_sdk.continue_trace` instead.` ### Deprecated diff --git a/sentry_sdk/integrations/grpc/aio/server.py b/sentry_sdk/integrations/grpc/aio/server.py index addc6bee36..6d38e91363 100644 --- a/sentry_sdk/integrations/grpc/aio/server.py +++ b/sentry_sdk/integrations/grpc/aio/server.py @@ -2,7 +2,7 @@ from sentry_sdk.consts import OP from sentry_sdk.integrations import DidNotEnable from sentry_sdk.integrations.grpc.consts import SPAN_ORIGIN -from sentry_sdk.tracing import Transaction, TRANSACTION_SOURCE_CUSTOM +from sentry_sdk.tracing import TRANSACTION_SOURCE_CUSTOM from sentry_sdk.utils import event_from_exception from typing import TYPE_CHECKING @@ -44,26 +44,24 @@ async def wrapped(request, context): return await handler(request, context) # What if the headers are empty? - transaction = Transaction.continue_from_headers( - dict(context.invocation_metadata()), - op=OP.GRPC_SERVER, - name=name, - source=TRANSACTION_SOURCE_CUSTOM, - origin=SPAN_ORIGIN, - ) - - with sentry_sdk.start_transaction(transaction=transaction): - try: - return await handler.unary_unary(request, context) - except AbortError: - raise - except Exception as exc: - event, hint = event_from_exception( - exc, - mechanism={"type": "grpc", "handled": False}, - ) - sentry_sdk.capture_event(event, hint=hint) - raise + with sentry_sdk.continue_trace(dict(context.invocation_metadata())): + with sentry_sdk.start_transaction( + op=OP.GRPC_SERVER, + name=name, + source=TRANSACTION_SOURCE_CUSTOM, + origin=SPAN_ORIGIN, + ): + try: + return await handler.unary_unary(request, context) + except AbortError: + raise + except Exception as exc: + event, hint = event_from_exception( + exc, + mechanism={"type": "grpc", "handled": False}, + ) + sentry_sdk.capture_event(event, hint=hint) + raise elif not handler.request_streaming and handler.response_streaming: handler_factory = grpc.unary_stream_rpc_method_handler diff --git a/sentry_sdk/integrations/grpc/server.py b/sentry_sdk/integrations/grpc/server.py index a640df5e11..fb123c5ca4 100644 --- a/sentry_sdk/integrations/grpc/server.py +++ b/sentry_sdk/integrations/grpc/server.py @@ -2,7 +2,7 @@ from sentry_sdk.consts import OP from sentry_sdk.integrations import DidNotEnable from sentry_sdk.integrations.grpc.consts import SPAN_ORIGIN -from sentry_sdk.tracing import Transaction, TRANSACTION_SOURCE_CUSTOM +from sentry_sdk.tracing import TRANSACTION_SOURCE_CUSTOM from typing import TYPE_CHECKING @@ -38,19 +38,17 @@ def behavior(request, context): if name: metadata = dict(context.invocation_metadata()) - transaction = Transaction.continue_from_headers( - metadata, - op=OP.GRPC_SERVER, - name=name, - source=TRANSACTION_SOURCE_CUSTOM, - origin=SPAN_ORIGIN, - ) - - with sentry_sdk.start_transaction(transaction=transaction): - try: - return handler.unary_unary(request, context) - except BaseException as e: - raise e + with sentry_sdk.continue_trace(metadata): + with sentry_sdk.start_transaction( + op=OP.GRPC_SERVER, + name=name, + source=TRANSACTION_SOURCE_CUSTOM, + origin=SPAN_ORIGIN, + ): + try: + return handler.unary_unary(request, context) + except BaseException as e: + raise e else: return handler.unary_unary(request, context) diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 66a3a7b7c7..136b4c0c18 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -5,6 +5,7 @@ from opentelemetry import trace as otel_trace, context from opentelemetry.trace import 
format_trace_id, format_span_id from opentelemetry.trace.status import StatusCode +from opentelemetry.sdk.trace import ReadableSpan import sentry_sdk from sentry_sdk.consts import SPANSTATUS, SPANDATA @@ -37,7 +38,6 @@ R = TypeVar("R") import sentry_sdk.profiler - from sentry_sdk.scope import Scope from sentry_sdk._types import ( Event, MeasurementUnit, @@ -1198,7 +1198,6 @@ def __init__( op=None, # type: Optional[str] description=None, # type: Optional[str] status=None, # type: Optional[str] - scope=None, # type: Optional[Scope] start_timestamp=None, # type: Optional[Union[datetime, float]] origin=None, # type: Optional[str] name=None, # type: Optional[str] @@ -1218,10 +1217,9 @@ def __init__( # OTel timestamps have nanosecond precision start_timestamp = convert_to_otel_timestamp(start_timestamp) - # XXX deal with _otel_span being a NonRecordingSpan self._otel_span = tracer.start_span( description or op or "", start_time=start_timestamp - ) # XXX + ) self.origin = origin or DEFAULT_SPAN_ORIGIN self.op = op @@ -1267,12 +1265,18 @@ def __exit__(self, ty, value, tb): # XXX set status to error if unset and an exception occurred? context.detach(self._ctx_token) + def _get_attribute(self, name): + # type: (str) -> Optional[Any] + if not isinstance(self._otel_span, ReadableSpan): + return None + return self._otel_span.attributes.get(name) + @property def description(self): # type: () -> Optional[str] from sentry_sdk.integrations.opentelemetry.consts import SentrySpanAttribute - return self._otel_span.attributes.get(SentrySpanAttribute.DESCRIPTION) + return self._get_attribute(SentrySpanAttribute.DESCRIPTION) @description.setter def description(self, value): @@ -1287,7 +1291,7 @@ def origin(self): # type: () -> Optional[str] from sentry_sdk.integrations.opentelemetry.consts import SentrySpanAttribute - return self._otel_span.attributes.get(SentrySpanAttribute.ORIGIN) + return self._get_attribute(SentrySpanAttribute.ORIGIN) @origin.setter def origin(self, value): @@ -1299,7 +1303,7 @@ def origin(self, value): @property def containing_transaction(self): - # type: () -> Optional[Transaction] + # type: () -> Optional[POTelSpan] """ Get the transaction this span is a child of. @@ -1311,16 +1315,6 @@ def containing_transaction(self): ) return self.root_span - @containing_transaction.setter - def containing_transaction(self, value): - # type: (Span) -> None - """ - Set this span's transaction. - .. deprecated:: 3.0.0 - Use :func:`root_span` instead. - """ - pass - @property def root_span(self): # type: () -> Optional[POTelSpan] @@ -1329,21 +1323,22 @@ def root_span(self): # not sure if there's a way to retrieve the parent with pure otel. 
return None - @root_span.setter - def root_span(self, value): - pass - @property def is_root_span(self): - if isinstance(self._otel_span, otel_trace.NonRecordingSpan): - return False - - return self._otel_span.parent is None + # type: () -> bool + return ( + isinstance(self._otel_span, ReadableSpan) and self._otel_span.parent is None + ) @property def parent_span_id(self): # type: () -> Optional[str] - return self._otel_span.parent if hasattr(self._otel_span, "parent") else None + if ( + not isinstance(self._otel_span, ReadableSpan) + or self._otel_span.parent is None + ): + return None + return format_span_id(self._otel_span.parent.span_id) @property def trace_id(self): @@ -1370,7 +1365,7 @@ def op(self): # type: () -> Optional[str] from sentry_sdk.integrations.opentelemetry.consts import SentrySpanAttribute - return self._otel_span.attributes.get(SentrySpanAttribute.OP) + return self._get_attribute(SentrySpanAttribute.OP) @op.setter def op(self, value): @@ -1385,7 +1380,7 @@ def name(self): # type: () -> Optional[str] from sentry_sdk.integrations.opentelemetry.consts import SentrySpanAttribute - return self._otel_span.attributes.get(SentrySpanAttribute.NAME) + return self._get_attribute(SentrySpanAttribute.NAME) @name.setter def name(self, value): @@ -1408,6 +1403,9 @@ def source(self, value): @property def start_timestamp(self): # type: () -> Optional[datetime] + if not isinstance(self._otel_span, ReadableSpan): + return None + start_time = self._otel_span.start_time if start_time is None: return None @@ -1421,6 +1419,9 @@ def start_timestamp(self): @property def timestamp(self): # type: () -> Optional[datetime] + if not isinstance(self._otel_span, ReadableSpan): + return None + end_time = self._otel_span.end_time if end_time is None: return None @@ -1432,49 +1433,16 @@ def timestamp(self): return convert_from_otel_timestamp(end_time) def start_child(self, **kwargs): - # type: (str, **Any) -> POTelSpan + # type: (**Any) -> POTelSpan kwargs.setdefault("sampled", self.sampled) span = POTelSpan(**kwargs) return span - @classmethod - def continue_from_environ( - cls, - environ, # type: Mapping[str, str] - **kwargs, # type: Any - ): - # type: (...) -> POTelSpan - # XXX actually propagate - span = POTelSpan(**kwargs) - return span - - @classmethod - def continue_from_headers( - cls, - headers, # type: Mapping[str, str] - **kwargs, # type: Any - ): - # type: (...) -> POTelSpan - # XXX actually propagate - span = POTelSpan(**kwargs) - return span - def iter_headers(self): # type: () -> Iterator[Tuple[str, str]] pass - @classmethod - def from_traceparent( - cls, - traceparent, # type: Optional[str] - **kwargs, # type: Any - ): - # type: (...) 
-> Optional[Transaction] - # XXX actually propagate - span = POTelSpan(**kwargs) - return span - def to_traceparent(self): # type: () -> str if self.sampled is True: From 0e0b5b0e2d58ed8462e4bb44415592c983561f9f Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Tue, 10 Sep 2024 14:02:18 +0200 Subject: [PATCH 034/244] Add root_span implementation (#3513) * add and track underlying root span in a hidden attr `_sentry_root_otel_span` in all subsequent children spans in the span processor on start * wrap this underlying root otel span in a `POTelSpan` to act as a proxy * make `POTelSpan` constructor work with explicitly passed in `otel_span` * implement `__eq__` on `POTelSpan` to make sure proxies to the same underlying `_otel_span` are considered the same --- .../opentelemetry/potel_span_processor.py | 26 ++++++++- sentry_sdk/tracing.py | 57 +++++++++++-------- .../integrations/opentelemetry/test_potel.py | 15 +++++ 3 files changed, 72 insertions(+), 26 deletions(-) diff --git a/sentry_sdk/integrations/opentelemetry/potel_span_processor.py b/sentry_sdk/integrations/opentelemetry/potel_span_processor.py index 06376ec3e6..d61b5f8782 100644 --- a/sentry_sdk/integrations/opentelemetry/potel_span_processor.py +++ b/sentry_sdk/integrations/opentelemetry/potel_span_processor.py @@ -1,7 +1,13 @@ from collections import deque, defaultdict from typing import cast -from opentelemetry.trace import format_trace_id, format_span_id +from opentelemetry.trace import ( + format_trace_id, + format_span_id, + get_current_span, + INVALID_SPAN, + Span as TraceApiSpan, +) from opentelemetry.context import Context from opentelemetry.sdk.trace import Span, ReadableSpan, SpanProcessor @@ -44,7 +50,8 @@ def __init__(self): def on_start(self, span, parent_context=None): # type: (Span, Optional[Context]) -> None - pass + if not is_sentry_span(span): + self._add_root_span(span, get_current_span(parent_context)) def on_end(self, span): # type: (ReadableSpan) -> None @@ -68,6 +75,21 @@ def force_flush(self, timeout_millis=30000): # type: (int) -> bool return True + def _add_root_span(self, span, parent_span): + # type: (Span, TraceApiSpan) -> None + """ + This is required to make POTelSpan.root_span work + since we can't traverse back to the root purely with otel efficiently. 
+ """ + if parent_span != INVALID_SPAN and not parent_span.get_span_context().is_remote: + # child span points to parent's root or parent + span._sentry_root_otel_span = getattr( + parent_span, "_sentry_root_otel_span", parent_span + ) + else: + # root span points to itself + span._sentry_root_otel_span = span + def _flush_root_span(self, span): # type: (ReadableSpan) -> None transaction_event = self._root_span_to_transaction_event(span) diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 136b4c0c18..46e4b93baa 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -3,7 +3,7 @@ from datetime import datetime, timedelta, timezone from opentelemetry import trace as otel_trace, context -from opentelemetry.trace import format_trace_id, format_span_id +from opentelemetry.trace import format_trace_id, format_span_id, Span as OtelSpan from opentelemetry.trace.status import StatusCode from opentelemetry.sdk.trace import ReadableSpan @@ -17,7 +17,7 @@ nanosecond_time, ) -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, cast if TYPE_CHECKING: from collections.abc import Callable, Mapping, MutableMapping @@ -1201,6 +1201,7 @@ def __init__( start_timestamp=None, # type: Optional[Union[datetime, float]] origin=None, # type: Optional[str] name=None, # type: Optional[str] + otel_span=None, # type: Optional[OtelSpan] **_, # type: dict[str, object] ): # type: (...) -> None @@ -1208,25 +1209,34 @@ def __init__( For backwards compatibility with old the old Span interface, this class accepts arbitrary keyword arguments, in addition to the ones explicitly listed in the signature. These additional arguments are ignored. + + If otel_span is passed explicitly, just acts as a proxy. """ - from sentry_sdk.integrations.opentelemetry.utils import ( - convert_to_otel_timestamp, - ) + if otel_span is not None: + self._otel_span = otel_span + else: + from sentry_sdk.integrations.opentelemetry.utils import ( + convert_to_otel_timestamp, + ) - if start_timestamp is not None: - # OTel timestamps have nanosecond precision - start_timestamp = convert_to_otel_timestamp(start_timestamp) + if start_timestamp is not None: + # OTel timestamps have nanosecond precision + start_timestamp = convert_to_otel_timestamp(start_timestamp) - self._otel_span = tracer.start_span( - description or op or "", start_time=start_timestamp - ) + self._otel_span = tracer.start_span( + description or op or "", start_time=start_timestamp + ) - self.origin = origin or DEFAULT_SPAN_ORIGIN - self.op = op - self.description = description - self.name = name - if status is not None: - self.set_status(status) + self.origin = origin or DEFAULT_SPAN_ORIGIN + self.op = op + self.description = description + self.name = name + if status is not None: + self.set_status(status) + + def __eq__(self, other): + # type: (POTelSpan) -> bool + return self._otel_span == other._otel_span def __repr__(self): # type: () -> str @@ -1318,17 +1328,16 @@ def containing_transaction(self): @property def root_span(self): # type: () -> Optional[POTelSpan] - # XXX implement this - # there's a span.parent property, but it returns the parent spancontext - # not sure if there's a way to retrieve the parent with pure otel. 
- return None + root_otel_span = cast( + "Optional[OtelSpan]", + getattr(self._otel_span, "_sentry_root_otel_span", None), + ) + return POTelSpan(otel_span=root_otel_span) if root_otel_span else None @property def is_root_span(self): # type: () -> bool - return ( - isinstance(self._otel_span, ReadableSpan) and self._otel_span.parent is None - ) + return self.root_span == self @property def parent_span_id(self): diff --git a/tests/integrations/opentelemetry/test_potel.py b/tests/integrations/opentelemetry/test_potel.py index 5e44cc3888..2b972addd1 100644 --- a/tests/integrations/opentelemetry/test_potel.py +++ b/tests/integrations/opentelemetry/test_potel.py @@ -314,3 +314,18 @@ def test_multiple_transaction_tags_isolation_scope_started_with_sentry( assert payload_a["tags"] == {"tag.global": 99, "tag.inner.a": "a"} assert payload_b["tags"] == {"tag.global": 99, "tag.inner.b": "b"} + + +def test_potel_span_root_span_references(): + with sentry_sdk.start_span(description="request") as request_span: + assert request_span.is_root_span + assert request_span.root_span == request_span + with sentry_sdk.start_span(description="db") as db_span: + assert not db_span.is_root_span + assert db_span.root_span == request_span + with sentry_sdk.start_span(description="redis") as redis_span: + assert not redis_span.is_root_span + assert redis_span.root_span == request_span + with sentry_sdk.start_span(description="http") as http_span: + assert not http_span.is_root_span + assert http_span.root_span == request_span From 3b54bbf094fa4dd041b961c622a379497886036c Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Tue, 10 Sep 2024 14:10:33 +0200 Subject: [PATCH 035/244] Potel Sampling (#3501) Add a new SentrySampler that is used for sampling OpenTelemetry spans the Sentry way (using Sentrys traces_sample_rate and traces_sampler config options) Fixes #3318 --- .../integrations/opentelemetry/consts.py | 2 + .../integrations/opentelemetry/integration.py | 3 +- .../integrations/opentelemetry/sampler.py | 122 +++++++ .../integrations/opentelemetry/scope.py | 3 + .../opentelemetry/test_sampler.py | 339 ++++++++++++++++++ 5 files changed, 468 insertions(+), 1 deletion(-) create mode 100644 sentry_sdk/integrations/opentelemetry/sampler.py create mode 100644 tests/integrations/opentelemetry/test_sampler.py diff --git a/sentry_sdk/integrations/opentelemetry/consts.py b/sentry_sdk/integrations/opentelemetry/consts.py index 790dac15ec..cb088f13a5 100644 --- a/sentry_sdk/integrations/opentelemetry/consts.py +++ b/sentry_sdk/integrations/opentelemetry/consts.py @@ -11,6 +11,8 @@ SENTRY_USE_CURRENT_SCOPE_KEY = create_key("sentry_use_current_scope") SENTRY_USE_ISOLATION_SCOPE_KEY = create_key("sentry_use_isolation_scope") +SENTRY_TRACE_STATE_DROPPED = "sentry_dropped" + OTEL_SENTRY_CONTEXT = "otel" SPAN_ORIGIN = "auto.otel" diff --git a/sentry_sdk/integrations/opentelemetry/integration.py b/sentry_sdk/integrations/opentelemetry/integration.py index 4cd969f0e0..3f71e86f02 100644 --- a/sentry_sdk/integrations/opentelemetry/integration.py +++ b/sentry_sdk/integrations/opentelemetry/integration.py @@ -12,6 +12,7 @@ from sentry_sdk.integrations.opentelemetry.contextvars_context import ( SentryContextVarsRuntimeContext, ) +from sentry_sdk.integrations.opentelemetry.sampler import SentrySampler from sentry_sdk.utils import logger try: @@ -55,7 +56,7 @@ def _setup_sentry_tracing(): opentelemetry.context._RUNTIME_CONTEXT = SentryContextVarsRuntimeContext() - provider = TracerProvider() + provider = TracerProvider(sampler=SentrySampler()) 
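Because the sampler is installed on the global `TracerProvider` here, spans started through the plain OpenTelemetry API go through the same sampling and export path as `sentry_sdk` spans; a small sketch of that assumption (tracer and span names are illustrative):

from opentelemetry import trace
import sentry_sdk

sentry_sdk.init(traces_sample_rate=1.0)
tracer = trace.get_tracer(__name__)

with tracer.start_as_current_span("otel-native-span"):
    # Sampled by SentrySampler and collected by PotelSentrySpanProcessor,
    # so it belongs to the same trace as spans from the Sentry API.
    with sentry_sdk.start_span(description="child-from-sentry-api"):
        pass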
provider.add_span_processor(PotelSentrySpanProcessor()) trace.set_tracer_provider(provider) diff --git a/sentry_sdk/integrations/opentelemetry/sampler.py b/sentry_sdk/integrations/opentelemetry/sampler.py new file mode 100644 index 0000000000..445c2edd02 --- /dev/null +++ b/sentry_sdk/integrations/opentelemetry/sampler.py @@ -0,0 +1,122 @@ +from random import random + +from opentelemetry import trace + +from opentelemetry.sdk.trace.sampling import Sampler, SamplingResult, Decision +from opentelemetry.trace.span import TraceState + +import sentry_sdk +from sentry_sdk.integrations.opentelemetry.consts import SENTRY_TRACE_STATE_DROPPED +from sentry_sdk.tracing_utils import has_tracing_enabled +from sentry_sdk.utils import is_valid_sample_rate, logger + +from typing import TYPE_CHECKING, Optional, Sequence + +if TYPE_CHECKING: + from opentelemetry.context import Context + from opentelemetry.trace import Link, SpanKind + from opentelemetry.trace.span import SpanContext + from opentelemetry.util.types import Attributes + + +def get_parent_sampled(parent_context, trace_id): + # type: (Optional[SpanContext], int) -> Optional[bool] + if parent_context is None: + return None + + is_span_context_valid = parent_context is not None and parent_context.is_valid + + # Only inherit sample rate if `traceId` is the same + if is_span_context_valid and parent_context.trace_id == trace_id: + # this is getSamplingDecision in JS + if parent_context.trace_flags.sampled: + return True + + dropped = parent_context.trace_state.get(SENTRY_TRACE_STATE_DROPPED) == "true" + if dropped: + return False + + # TODO-anton: fall back to sampling decision in DSC (for this die DSC needs to be set in the trace_state) + + return None + + +def dropped(parent_context=None): + # type: (Optional[SpanContext]) -> SamplingResult + trace_state = parent_context.trace_state if parent_context is not None else None + updated_trace_context = trace_state or TraceState() + updated_trace_context = updated_trace_context.update( + SENTRY_TRACE_STATE_DROPPED, "true" + ) + return SamplingResult( + Decision.DROP, + trace_state=updated_trace_context, + ) + + +class SentrySampler(Sampler): + def should_sample( + self, + parent_context, # type: Optional[Context] + trace_id, # type: int + name, # type: str + kind=None, # type: Optional[SpanKind] + attributes=None, # type: Attributes + links=None, # type: Optional[Sequence[Link]] + trace_state=None, # type: Optional[TraceState] + ): + # type: (...) -> SamplingResult + client = sentry_sdk.get_client() + + parent_span = trace.get_current_span(parent_context) + parent_context = parent_span.get_span_context() if parent_span else None + + # No tracing enabled, thus no sampling + if not has_tracing_enabled(client.options): + return dropped(parent_context) + + sample_rate = None + + # Check if sampled=True was passed to start_transaction + # TODO-anton: Do we want to keep the start_transaction(sampled=True) thing? + + # Check if there is a traces_sampler + # Traces_sampler is responsible to check parent sampled to have full transactions. 
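A user-supplied `traces_sampler` therefore receives both the transaction name and the upstream decision via the `sampling_context` dict built just below; a sketch of a callback honouring those keys (the rates and the name check are illustrative):

import sentry_sdk

def my_traces_sampler(sampling_context):
    # Inherit the head-of-trace decision when one was propagated.
    parent_sampled = sampling_context.get("parent_sampled")
    if parent_sampled is not None:
        return float(parent_sampled)
    # Otherwise decide per transaction name.
    if sampling_context["transaction_context"]["name"].startswith("healthcheck"):
        return 0.0
    return 0.1

sentry_sdk.init(traces_sampler=my_traces_sampler)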
+ has_traces_sampler = callable(client.options.get("traces_sampler")) + if has_traces_sampler: + # TODO-anton: Make proper sampling_context + sampling_context = { + "transaction_context": { + "name": name, + }, + "parent_sampled": get_parent_sampled(parent_context, trace_id), + } + + sample_rate = client.options["traces_sampler"](sampling_context) + + else: + # Check if there is a parent with a sampling decision + parent_sampled = get_parent_sampled(parent_context, trace_id) + if parent_sampled is not None: + sample_rate = parent_sampled + else: + # Check if there is a traces_sample_rate + sample_rate = client.options.get("traces_sample_rate") + + # If the sample rate is invalid, drop the span + if not is_valid_sample_rate(sample_rate, source=self.__class__.__name__): + logger.warning( + f"[Tracing] Discarding {name} because of invalid sample rate." + ) + return dropped(parent_context) + + # Roll the dice on sample rate + sampled = random() < float(sample_rate) + + if sampled: + return SamplingResult(Decision.RECORD_AND_SAMPLE) + else: + return dropped(parent_context) + + def get_description(self) -> str: + return self.__class__.__name__ diff --git a/sentry_sdk/integrations/opentelemetry/scope.py b/sentry_sdk/integrations/opentelemetry/scope.py index 48782875ed..40eb47c01f 100644 --- a/sentry_sdk/integrations/opentelemetry/scope.py +++ b/sentry_sdk/integrations/opentelemetry/scope.py @@ -97,6 +97,9 @@ def _incoming_otel_span_context(self): span_id=int(self._propagation_context.parent_span_id, 16), # type: ignore is_remote=True, trace_flags=trace_flags, + # TODO-anton: add trace_state (mapping[str,str]) with the parentSpanId, dsc and sampled from self._propagation_context + # trace_state={ + # } ) return span_context diff --git a/tests/integrations/opentelemetry/test_sampler.py b/tests/integrations/opentelemetry/test_sampler.py new file mode 100644 index 0000000000..dfd4981ecf --- /dev/null +++ b/tests/integrations/opentelemetry/test_sampler.py @@ -0,0 +1,339 @@ +import pytest +from unittest import mock + +from opentelemetry import trace + +import sentry_sdk + + +tracer = trace.get_tracer(__name__) + + +@pytest.mark.parametrize( + "traces_sample_rate, expected_num_of_envelopes", + [ + # special case for testing, do not pass any traces_sample_rate to init() (the default traces_sample_rate=None will be used) + (-1, 0), + # traces_sample_rate=None means do not create new traces, and also do not continue incoming traces. So, no envelopes at all. + (None, 0), + # traces_sample_rate=0 means do not create new traces (0% of the requests), but continue incoming traces. So envelopes will be created only if there is an incoming trace. + (0, 0), + # traces_sample_rate=1 means create new traces for 100% of requests (and also continue incoming traces, of course). + (1, 2), + ], +) +def test_sampling_traces_sample_rate_0_or_100( + sentry_init, + capture_envelopes, + traces_sample_rate, + expected_num_of_envelopes, +): + kwargs = {} + if traces_sample_rate != -1: + kwargs["traces_sample_rate"] = traces_sample_rate + + sentry_init(**kwargs) + + envelopes = capture_envelopes() + + with sentry_sdk.start_span(description="request a"): + with sentry_sdk.start_span(description="cache a"): + with sentry_sdk.start_span(description="db a"): + ... + + with sentry_sdk.start_span(description="request b"): + with sentry_sdk.start_span(description="cache b"): + with sentry_sdk.start_span(description="db b"): + ... 
+ + assert len(envelopes) == expected_num_of_envelopes + + if expected_num_of_envelopes == 2: + (transaction_a, transaction_b) = [ + envelope.items[0].payload.json for envelope in envelopes + ] + + assert transaction_a["transaction"] == "request a" + assert transaction_b["transaction"] == "request b" + + spans_a = transaction_a["spans"] + assert len(spans_a) == 2 + assert spans_a[0]["description"] == "cache a" + assert spans_a[1]["description"] == "db a" + spans_b = transaction_b["spans"] + assert len(spans_b) == 2 + assert spans_b[0]["description"] == "cache b" + assert spans_b[1]["description"] == "db b" + + +def test_sampling_traces_sample_rate_50(sentry_init, capture_envelopes): + sentry_init(traces_sample_rate=0.5) + + envelopes = capture_envelopes() + + with mock.patch( + "sentry_sdk.integrations.opentelemetry.sampler.random", return_value=0.2 + ): # drop + with sentry_sdk.start_span(description="request a"): + with sentry_sdk.start_span(description="cache a"): + with sentry_sdk.start_span(description="db a"): + ... + + with mock.patch( + "sentry_sdk.integrations.opentelemetry.sampler.random", return_value=0.7 + ): # keep + with sentry_sdk.start_span(description="request b"): + with sentry_sdk.start_span(description="cache b"): + with sentry_sdk.start_span(description="db b"): + ... + + assert len(envelopes) == 1 + + (envelope,) = envelopes + transaction = envelope.items[0].payload.json + assert transaction["transaction"] == "request a" + spans = transaction["spans"] + assert len(spans) == 2 + assert spans[0]["description"] == "cache a" + assert spans[1]["description"] == "db a" + + +def test_sampling_traces_sampler(sentry_init, capture_envelopes): + def keep_only_a(sampling_context): + if " a" in sampling_context["transaction_context"]["name"]: + return 0.05 + else: + return 0 + + sentry_init( + traces_sample_rate=1.0, + traces_sampler=keep_only_a, + ) + + envelopes = capture_envelopes() + + # Make sure random() always returns the same values + with mock.patch( + "sentry_sdk.integrations.opentelemetry.sampler.random", + side_effect=[0.04 for _ in range(12)], + ): + + with sentry_sdk.start_span(description="request a"): # keep + with sentry_sdk.start_span(description="cache a"): # keep + with sentry_sdk.start_span(description="db a"): # keep + ... + + with sentry_sdk.start_span(description="request b"): # drop + with sentry_sdk.start_span(description="cache b"): # drop + with sentry_sdk.start_span(description="db b"): # drop + ... + + with sentry_sdk.start_span(description="request c"): # drop + with sentry_sdk.start_span( + description="cache a c" + ): # keep (but trx dropped, so not collected) + with sentry_sdk.start_span( + description="db a c" + ): # keep (but trx dropped, so not collected) + ... + + with sentry_sdk.start_span(description="new a c"): # keep + with sentry_sdk.start_span(description="cache c"): # drop + with sentry_sdk.start_span(description="db c"): # drop + ... 
+ + assert len(envelopes) == 2 + (envelope1, envelope2) = envelopes + transaction1 = envelope1.items[0].payload.json + transaction2 = envelope2.items[0].payload.json + + assert transaction1["transaction"] == "request a" + assert len(transaction1["spans"]) == 2 + assert transaction2["transaction"] == "new a c" + assert len(transaction2["spans"]) == 0 + + +def test_sampling_traces_sampler_boolean(sentry_init, capture_envelopes): + def keep_only_a(sampling_context): + if " a" in sampling_context["transaction_context"]["name"]: + return True + else: + return False + + sentry_init( + traces_sample_rate=1.0, + traces_sampler=keep_only_a, + ) + + envelopes = capture_envelopes() + + with sentry_sdk.start_span(description="request a"): # keep + with sentry_sdk.start_span(description="cache a"): # keep + with sentry_sdk.start_span(description="db X"): # drop + ... + + with sentry_sdk.start_span(description="request b"): # drop + with sentry_sdk.start_span(description="cache b"): # drop + with sentry_sdk.start_span(description="db b"): # drop + ... + + assert len(envelopes) == 1 + (envelope,) = envelopes + transaction = envelope.items[0].payload.json + + assert transaction["transaction"] == "request a" + assert len(transaction["spans"]) == 1 + + +@pytest.mark.parametrize( + "traces_sample_rate, expected_num_of_envelopes", + [ + # special case for testing, do not pass any traces_sample_rate to init() (the default traces_sample_rate=None will be used) + (-1, 0), + # traces_sample_rate=None means do not create new traces, and also do not continue incoming traces. So, no envelopes at all. + (None, 0), + # traces_sample_rate=0 means do not create new traces (0% of the requests), but continue incoming traces. So envelopes will be created only if there is an incoming trace. + (0, 1), + # traces_sample_rate=1 means create new traces for 100% of requests (and also continue incoming traces, of course). + (1, 1), + ], +) +def test_sampling_parent_sampled( + sentry_init, + traces_sample_rate, + expected_num_of_envelopes, + capture_envelopes, +): + kwargs = {} + if traces_sample_rate != -1: + kwargs["traces_sample_rate"] = traces_sample_rate + + sentry_init(**kwargs) + + envelopes = capture_envelopes() + + # The upstream service has sampled the request + headers = { + "sentry-trace": "771a43a4192642f0b136d5159a501700-1234567890abcdef-1", + } + with sentry_sdk.continue_trace(headers): + with sentry_sdk.start_span(description="request a"): + with sentry_sdk.start_span(description="cache a"): + with sentry_sdk.start_span(description="db X"): + ... + + assert len(envelopes) == expected_num_of_envelopes + + if expected_num_of_envelopes == 1: + (envelope,) = envelopes + transaction = envelope.items[0].payload.json + assert transaction["transaction"] == "request a" + assert ( + transaction["contexts"]["trace"]["trace_id"] + == "771a43a4192642f0b136d5159a501700" + ) + assert transaction["contexts"]["trace"]["span_id"] != "1234567890abcdef" + assert transaction["contexts"]["trace"]["parent_span_id"] == "1234567890abcdef" + + +@pytest.mark.parametrize( + "traces_sample_rate, expected_num_of_envelopes", + [ + # special case for testing, do not pass any traces_sample_rate to init() (the default traces_sample_rate=None will be used) + (-1, 0), + # traces_sample_rate=None means do not create new traces, and also do not continue incoming traces. So, no envelopes at all. + (None, 0), + # traces_sample_rate=0 means do not create new traces (0% of the requests), but continue incoming traces. 
So envelopes will be created only if there is an incoming trace. + (0, 0), + # traces_sample_rate=1 means create new traces for 100% of requests (and also continue incoming traces, of course). + (1, 1), + ], +) +def test_sampling_parent_dropped( + sentry_init, + traces_sample_rate, + expected_num_of_envelopes, + capture_envelopes, +): + kwargs = {} + if traces_sample_rate != -1: + kwargs["traces_sample_rate"] = traces_sample_rate + + sentry_init(**kwargs) + + envelopes = capture_envelopes() + + # The upstream service has dropped the request + headers = { + "sentry-trace": "771a43a4192642f0b136d5159a501700-1234567890abcdef-0", + } + with sentry_sdk.continue_trace(headers): + with sentry_sdk.start_span(description="request a"): + with sentry_sdk.start_span(description="cache a"): + with sentry_sdk.start_span(description="db X"): + ... + + assert len(envelopes) == expected_num_of_envelopes + + if expected_num_of_envelopes == 1: + (envelope,) = envelopes + transaction = envelope.items[0].payload.json + assert transaction["transaction"] == "request a" + assert ( + transaction["contexts"]["trace"]["trace_id"] + == "771a43a4192642f0b136d5159a501700" + ) + assert transaction["contexts"]["trace"]["span_id"] != "1234567890abcdef" + assert transaction["contexts"]["trace"]["parent_span_id"] == "1234567890abcdef" + + +@pytest.mark.parametrize( + "traces_sample_rate, expected_num_of_envelopes", + [ + # special case for testing, do not pass any traces_sample_rate to init() (the default traces_sample_rate=None will be used) + (-1, 0), + # traces_sample_rate=None means do not create new traces, and also do not continue incoming traces. So, no envelopes at all. + (None, 0), + # traces_sample_rate=0 means do not create new traces (0% of the requests), but continue incoming traces. So envelopes will be created only if there is an incoming trace. + (0, 0), + # traces_sample_rate=1 means create new traces for 100% of requests (and also continue incoming traces, of course). + (1, 1), + ], +) +def test_sampling_parent_deferred( + sentry_init, + traces_sample_rate, + expected_num_of_envelopes, + capture_envelopes, +): + kwargs = {} + if traces_sample_rate != -1: + kwargs["traces_sample_rate"] = traces_sample_rate + + sentry_init(**kwargs) + + envelopes = capture_envelopes() + + # The upstream service has deferred the sampling decision to us. + headers = { + "sentry-trace": "771a43a4192642f0b136d5159a501700-1234567890abcdef-", + } + + with sentry_sdk.continue_trace(headers): + with sentry_sdk.start_span(description="request a"): + with sentry_sdk.start_span(description="cache a"): + with sentry_sdk.start_span(description="db X"): + ... 
+ + assert len(envelopes) == expected_num_of_envelopes + + if expected_num_of_envelopes == 1: + (envelope,) = envelopes + transaction = envelope.items[0].payload.json + assert transaction["transaction"] == "request a" + assert ( + transaction["contexts"]["trace"]["trace_id"] + == "771a43a4192642f0b136d5159a501700" + ) + assert transaction["contexts"]["trace"]["span_id"] != "1234567890abcdef" + assert transaction["contexts"]["trace"]["parent_span_id"] == "1234567890abcdef" From 178c6a51776a8d19af7c7969a6434fd3f2f200fa Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Wed, 11 Sep 2024 10:01:31 +0200 Subject: [PATCH 036/244] missing import --- sentry_sdk/tracing.py | 1 + 1 file changed, 1 insertion(+) diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index ea45700a42..82c7ede10a 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -1,5 +1,6 @@ import uuid import random +import warnings from datetime import datetime, timedelta, timezone from opentelemetry import trace as otel_trace, context From 28d8b827782fbbfb0d6bd80f62fcd20637b103c9 Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Wed, 11 Sep 2024 11:27:15 +0200 Subject: [PATCH 037/244] Remove 3.6 compat (#3517) * Remove 3.6 compat * fix --- scripts/init_serverless_sdk.py | 4 +- sentry_sdk/_compat.py | 1 - sentry_sdk/client.py | 35 +++------ sentry_sdk/integrations/aws_lambda.py | 87 +++++++-------------- sentry_sdk/metrics.py | 7 +- sentry_sdk/profiler/transaction_profiler.py | 7 +- sentry_sdk/scope.py | 3 +- sentry_sdk/tracing.py | 6 +- sentry_sdk/utils.py | 49 ++---------- tests/test_metrics.py | 26 ------ 10 files changed, 54 insertions(+), 171 deletions(-) diff --git a/scripts/init_serverless_sdk.py b/scripts/init_serverless_sdk.py index 9b4412c420..d58605ff6f 100644 --- a/scripts/init_serverless_sdk.py +++ b/scripts/init_serverless_sdk.py @@ -50,8 +50,8 @@ def extract_and_load_lambda_function_module(self, module_path): module_name = module_path.split(os.path.sep)[-1] module_file_path = module_path + ".py" - # Supported python versions are 3.6, 3.7, 3.8 - if py_version >= (3, 6): + # Supported python versions are 3.7, 3.8 + if py_version >= (3, 7): import importlib.util spec = importlib.util.spec_from_file_location( diff --git a/sentry_sdk/_compat.py b/sentry_sdk/_compat.py index 3df12d5534..845e52a4b4 100644 --- a/sentry_sdk/_compat.py +++ b/sentry_sdk/_compat.py @@ -9,7 +9,6 @@ T = TypeVar("T") -PY37 = sys.version_info[0] == 3 and sys.version_info[1] >= 7 PY310 = sys.version_info[0] == 3 and sys.version_info[1] >= 10 PY311 = sys.version_info[0] == 3 and sys.version_info[1] >= 11 diff --git a/sentry_sdk/client.py b/sentry_sdk/client.py index 03ed439b71..04737de981 100644 --- a/sentry_sdk/client.py +++ b/sentry_sdk/client.py @@ -7,7 +7,7 @@ from importlib import import_module from typing import cast -from sentry_sdk._compat import PY37, check_uwsgi_thread_support +from sentry_sdk._compat import check_uwsgi_thread_support from sentry_sdk.utils import ( ContextVar, capture_internal_exceptions, @@ -18,7 +18,6 @@ get_type_name, get_default_release, handle_in_app, - is_gevent, logger, ) from sentry_sdk.serializer import serialize @@ -132,14 +131,6 @@ def _get_options(*args, **kwargs): return rv -try: - # Python 3.6+ - module_not_found_error = ModuleNotFoundError -except Exception: - # Older Python versions - module_not_found_error = ImportError # type: ignore - - class BaseClient: """ .. 
versionadded:: 2.0.0 @@ -264,7 +255,7 @@ def _setup_instrumentation(self, functions_to_trace): function_obj = getattr(module_obj, function_name) setattr(module_obj, function_name, trace(function_obj)) logger.debug("Enabled tracing for %s", function_qualname) - except module_not_found_error: + except ModuleNotFoundError: try: # Try to import a class # ex: "mymodule.submodule.MyClassName.member_function" @@ -320,22 +311,14 @@ def _capture_envelope(envelope): self.metrics_aggregator = None # type: Optional[MetricsAggregator] experiments = self.options.get("_experiments", {}) if experiments.get("enable_metrics", True): - # Context vars are not working correctly on Python <=3.6 - # with gevent. - metrics_supported = not is_gevent() or PY37 - if metrics_supported: - from sentry_sdk.metrics import MetricsAggregator + from sentry_sdk.metrics import MetricsAggregator - self.metrics_aggregator = MetricsAggregator( - capture_func=_capture_envelope, - enable_code_locations=bool( - experiments.get("metric_code_locations", True) - ), - ) - else: - logger.info( - "Metrics not supported on Python 3.6 and lower with gevent." - ) + self.metrics_aggregator = MetricsAggregator( + capture_func=_capture_envelope, + enable_code_locations=bool( + experiments.get("metric_code_locations", True) + ), + ) max_request_body_size = ("always", "never", "small", "medium") if self.options["max_request_body_size"] not in max_request_body_size: diff --git a/sentry_sdk/integrations/aws_lambda.py b/sentry_sdk/integrations/aws_lambda.py index 9cc5095192..8374b27e8e 100644 --- a/sentry_sdk/integrations/aws_lambda.py +++ b/sentry_sdk/integrations/aws_lambda.py @@ -208,77 +208,44 @@ def setup_once(): ) return - pre_37 = hasattr(lambda_bootstrap, "handle_http_request") # Python 3.6 + lambda_bootstrap.LambdaRuntimeClient.post_init_error = _wrap_init_error( + lambda_bootstrap.LambdaRuntimeClient.post_init_error + ) - if pre_37: - old_handle_event_request = lambda_bootstrap.handle_event_request + old_handle_event_request = lambda_bootstrap.handle_event_request - def sentry_handle_event_request(request_handler, *args, **kwargs): - # type: (Any, *Any, **Any) -> Any - request_handler = _wrap_handler(request_handler) - return old_handle_event_request(request_handler, *args, **kwargs) - - lambda_bootstrap.handle_event_request = sentry_handle_event_request - - old_handle_http_request = lambda_bootstrap.handle_http_request - - def sentry_handle_http_request(request_handler, *args, **kwargs): - # type: (Any, *Any, **Any) -> Any - request_handler = _wrap_handler(request_handler) - return old_handle_http_request(request_handler, *args, **kwargs) - - lambda_bootstrap.handle_http_request = sentry_handle_http_request + def sentry_handle_event_request( # type: ignore + lambda_runtime_client, request_handler, *args, **kwargs + ): + request_handler = _wrap_handler(request_handler) + return old_handle_event_request( + lambda_runtime_client, request_handler, *args, **kwargs + ) - # Patch to_json to drain the queue. This should work even when the - # SDK is initialized inside of the handler + lambda_bootstrap.handle_event_request = sentry_handle_event_request - old_to_json = lambda_bootstrap.to_json + # Patch the runtime client to drain the queue. 
This should work + # even when the SDK is initialized inside of the handler - def sentry_to_json(*args, **kwargs): + def _wrap_post_function(f): + # type: (F) -> F + def inner(*args, **kwargs): # type: (*Any, **Any) -> Any _drain_queue() - return old_to_json(*args, **kwargs) - - lambda_bootstrap.to_json = sentry_to_json - else: - lambda_bootstrap.LambdaRuntimeClient.post_init_error = _wrap_init_error( - lambda_bootstrap.LambdaRuntimeClient.post_init_error - ) + return f(*args, **kwargs) - old_handle_event_request = lambda_bootstrap.handle_event_request + return inner # type: ignore - def sentry_handle_event_request( # type: ignore - lambda_runtime_client, request_handler, *args, **kwargs - ): - request_handler = _wrap_handler(request_handler) - return old_handle_event_request( - lambda_runtime_client, request_handler, *args, **kwargs - ) - - lambda_bootstrap.handle_event_request = sentry_handle_event_request - - # Patch the runtime client to drain the queue. This should work - # even when the SDK is initialized inside of the handler - - def _wrap_post_function(f): - # type: (F) -> F - def inner(*args, **kwargs): - # type: (*Any, **Any) -> Any - _drain_queue() - return f(*args, **kwargs) - - return inner # type: ignore - - lambda_bootstrap.LambdaRuntimeClient.post_invocation_result = ( - _wrap_post_function( - lambda_bootstrap.LambdaRuntimeClient.post_invocation_result - ) + lambda_bootstrap.LambdaRuntimeClient.post_invocation_result = ( + _wrap_post_function( + lambda_bootstrap.LambdaRuntimeClient.post_invocation_result ) - lambda_bootstrap.LambdaRuntimeClient.post_invocation_error = ( - _wrap_post_function( - lambda_bootstrap.LambdaRuntimeClient.post_invocation_error - ) + ) + lambda_bootstrap.LambdaRuntimeClient.post_invocation_error = ( + _wrap_post_function( + lambda_bootstrap.LambdaRuntimeClient.post_invocation_error ) + ) def get_lambda_bootstrap(): diff --git a/sentry_sdk/metrics.py b/sentry_sdk/metrics.py index da6d77c69a..08aedc71d0 100644 --- a/sentry_sdk/metrics.py +++ b/sentry_sdk/metrics.py @@ -16,7 +16,6 @@ from sentry_sdk.utils import ( ContextVar, now, - nanosecond_time, to_timestamp, serialize_frame, json_dumps, @@ -362,9 +361,9 @@ def _encode_locations(timestamp, code_locations): # some of these are dumb TIMING_FUNCTIONS = { - "nanosecond": nanosecond_time, - "microsecond": lambda: nanosecond_time() / 1000.0, - "millisecond": lambda: nanosecond_time() / 1000000.0, + "nanosecond": time.perf_counter_ns, + "microsecond": lambda: time.perf_counter_ns() / 1000.0, + "millisecond": lambda: time.perf_counter_ns() / 1000000.0, "second": now, "minute": lambda: now() / 60.0, "hour": lambda: now() / 3600.0, diff --git a/sentry_sdk/profiler/transaction_profiler.py b/sentry_sdk/profiler/transaction_profiler.py index a4c5e77ca9..caa02408b2 100644 --- a/sentry_sdk/profiler/transaction_profiler.py +++ b/sentry_sdk/profiler/transaction_profiler.py @@ -48,7 +48,6 @@ is_gevent, is_valid_sample_rate, logger, - nanosecond_time, set_in_app_in_frames, ) @@ -330,7 +329,7 @@ def start(self): logger.debug("[Profiling] Starting profile") self.active = True if not self.start_ns: - self.start_ns = nanosecond_time() + self.start_ns = time.perf_counter_ns() self.scheduler.start_profiling(self) def stop(self): @@ -341,7 +340,7 @@ def stop(self): assert self.scheduler, "No scheduler specified" logger.debug("[Profiling] Stopping profile") self.active = False - self.stop_ns = nanosecond_time() + self.stop_ns = time.perf_counter_ns() def __enter__(self): # type: () -> Profile @@ -580,7 +579,7 @@ def 
_sample_stack(*args, **kwargs): # were started after this point. new_profiles = len(self.new_profiles) - now = nanosecond_time() + now = time.perf_counter_ns() try: sample = [ diff --git a/sentry_sdk/scope.py b/sentry_sdk/scope.py index 61198bf720..f9ed9e4474 100644 --- a/sentry_sdk/scope.py +++ b/sentry_sdk/scope.py @@ -30,7 +30,6 @@ capture_internal_exception, capture_internal_exceptions, ContextVar, - datetime_from_isoformat, disable_capture_event, event_from_exception, exc_info_from_error, @@ -1258,7 +1257,7 @@ def _apply_breadcrumbs_to_event(self, event, hint, options): try: for crumb in event["breadcrumbs"]["values"]: if isinstance(crumb["timestamp"], str): - crumb["timestamp"] = datetime_from_isoformat(crumb["timestamp"]) + crumb["timestamp"] = datetime.fromisoformat(crumb["timestamp"]) event["breadcrumbs"]["values"].sort(key=lambda crumb: crumb["timestamp"]) except Exception: diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 82c7ede10a..7daeb9de23 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -1,5 +1,6 @@ import uuid import random +import time import warnings from datetime import datetime, timedelta, timezone @@ -15,7 +16,6 @@ get_current_thread_meta, is_valid_sample_rate, logger, - nanosecond_time, ) from typing import TYPE_CHECKING, cast @@ -303,7 +303,7 @@ def __init__( try: # profiling depends on this value and requires that # it is measured in nanoseconds - self._start_timestamp_monotonic_ns = nanosecond_time() + self._start_timestamp_monotonic_ns = time.perf_counter_ns() except AttributeError: pass @@ -612,7 +612,7 @@ def finish(self, scope=None, end_timestamp=None): end_timestamp = datetime.fromtimestamp(end_timestamp, timezone.utc) self.timestamp = end_timestamp else: - elapsed = nanosecond_time() - self._start_timestamp_monotonic_ns + elapsed = time.perf_counter_ns() - self._start_timestamp_monotonic_ns self.timestamp = self.start_timestamp + timedelta( microseconds=elapsed / 1000 ) diff --git a/sentry_sdk/utils.py b/sentry_sdk/utils.py index b21c27b619..5fb3cf604c 100644 --- a/sentry_sdk/utils.py +++ b/sentry_sdk/utils.py @@ -25,7 +25,6 @@ BaseExceptionGroup = None # type: ignore import sentry_sdk -from sentry_sdk._compat import PY37 from sentry_sdk.consts import DEFAULT_MAX_VALUE_LENGTH, EndpointType from typing import TYPE_CHECKING @@ -239,15 +238,6 @@ def format_timestamp(value): return utctime.strftime("%Y-%m-%dT%H:%M:%S.%fZ") -def datetime_from_isoformat(value): - # type: (str) -> datetime - try: - return datetime.fromisoformat(value) - except AttributeError: - # py 3.6 - return datetime.strptime(value, "%Y-%m-%dT%H:%M:%S.%f") - - def event_hint_with_exc_info(exc_info=None): # type: (Optional[ExcInfo]) -> Dict[str, Optional[ExcInfo]] """Creates a hint with the exc info filled in.""" @@ -1325,27 +1315,13 @@ def _get_contextvars(): See https://docs.sentry.io/platforms/python/contextvars/ for more information. """ if not _is_contextvars_broken(): - # aiocontextvars is a PyPI package that ensures that the contextvars - # backport (also a PyPI package) works with asyncio under Python 3.6 - # - # Import it if available. - if sys.version_info < (3, 7): - # `aiocontextvars` is absolutely required for functional - # contextvars on Python 3.6. - try: - from aiocontextvars import ContextVar - - return True, ContextVar - except ImportError: - pass - else: - # On Python 3.7 contextvars are functional. - try: - from contextvars import ContextVar + # On Python 3.7+ contextvars are functional. 
+ try: + from contextvars import ContextVar - return True, ContextVar - except ImportError: - pass + return True, ContextVar + except ImportError: + pass # Fall back to basic thread-local usage. @@ -1830,19 +1806,6 @@ async def runner(*args: "P.args", **kwargs: "P.kwargs"): return patcher -if PY37: - - def nanosecond_time(): - # type: () -> int - return time.perf_counter_ns() - -else: - - def nanosecond_time(): - # type: () -> int - return int(time.perf_counter() * 1e9) - - def now(): # type: () -> float return time.perf_counter() diff --git a/tests/test_metrics.py b/tests/test_metrics.py index 537f8a9646..ece17779d7 100644 --- a/tests/test_metrics.py +++ b/tests/test_metrics.py @@ -943,29 +943,3 @@ def bad_capture_envelope(*args, **kwargs): m = parse_metrics(envelope.items[0].payload.get_bytes()) assert len(m) == 1 assert m[0][1] == "counter@none" - - -@pytest.mark.skipif( - not gevent or sys.version_info >= (3, 7), - reason="Python 3.6 or lower and gevent required", -) -@pytest.mark.forked -def test_disable_metrics_for_old_python_with_gevent( - sentry_init, capture_envelopes, maybe_monkeypatched_threading -): - if maybe_monkeypatched_threading != "greenlet": - pytest.skip("Test specifically for gevent/greenlet") - - sentry_init( - release="fun-release", - environment="not-fun-env", - _experiments={"enable_metrics": True}, - ) - envelopes = capture_envelopes() - - metrics.incr("counter") - - sentry_sdk.flush() - - assert sentry_sdk.get_client().metrics_aggregator is None - assert not envelopes From 0653357e2c9ce41a4514ce388433bd62b197a747 Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Wed, 11 Sep 2024 14:11:58 +0200 Subject: [PATCH 038/244] Drop metrics (#3518) --- MIGRATION_GUIDE.md | 2 + .../sentryPythonDeleteTestFunctions/README.md | 4 +- .../lambda_function.py | 5 - sentry_sdk/_types.py | 23 - sentry_sdk/client.py | 19 - sentry_sdk/consts.py | 8 - sentry_sdk/envelope.py | 4 +- sentry_sdk/metrics.py | 969 ------------------ sentry_sdk/tracing.py | 27 - sentry_sdk/transport.py | 20 +- tests/test_metrics.py | 945 ----------------- tests/test_transport.py | 113 +- 12 files changed, 8 insertions(+), 2131 deletions(-) delete mode 100644 sentry_sdk/metrics.py delete mode 100644 tests/test_metrics.py diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index f360820dc3..2be1ad892b 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -21,6 +21,8 @@ Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of wh ### Removed - Dropped support for Python 3.6. +- `sentry_sdk.metrics` and associated metrics APIs have been removed as Sentry no longer accepts metrics data in this form. See https://sentry.zendesk.com/hc/en-us/articles/26369339769883-Upcoming-API-Changes-to-Metrics +- The experimental options `enable_metrics`, `before_emit_metric` and `metric_code_locations` have been removed. - When setting span status, the HTTP status code is no longer automatically added as a tag. - Class `Hub` has been removed. - Class `_ScopeManager` has been removed. 
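For context while reading the migration notes above, here is a brief illustrative sketch (not part of this patch) of the kind of 2.x-era usage that stops working once `sentry_sdk/metrics.py` and the related `_experiments` keys are deleted. The metric names and tag values below are made up for illustration; the emitter functions and option keys are the ones this change removes.

```python
# Sketch of pre-3.x metrics usage that this patch removes support for.
import sentry_sdk
from sentry_sdk import metrics  # module deleted by this patch; the import fails after it

sentry_sdk.init(
    _experiments={
        "enable_metrics": True,          # experimental option removed by this patch
        "metric_code_locations": True,   # experimental option removed by this patch
    },
)

# All of these emitters lived in the deleted sentry_sdk/metrics.py:
metrics.incr("button_click", 1.0, tags={"browser": "firefox"})  # counter
metrics.gauge("queue_depth", 42.0)                              # gauge
metrics.distribution("page_load", 123.4, unit="millisecond")    # distribution
metrics.set("user_seen", "jane@example.com")                    # set

with metrics.timing("process_batch"):                           # timing context manager
    pass
```

After upgrading, calls like these have to be removed; the envelope and transport changes further down in this patch likewise drop the `statsd`/`metric_bucket` handling that carried this data to Sentry.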
diff --git a/scripts/aws_lambda_functions/sentryPythonDeleteTestFunctions/README.md b/scripts/aws_lambda_functions/sentryPythonDeleteTestFunctions/README.md index de1120a026..a5cc1d8d42 100644 --- a/scripts/aws_lambda_functions/sentryPythonDeleteTestFunctions/README.md +++ b/scripts/aws_lambda_functions/sentryPythonDeleteTestFunctions/README.md @@ -9,5 +9,5 @@ The Lambda function has been deployed here: - Region: `us-east-1` - Function ARN: `arn:aws:lambda:us-east-1:943013980633:function:sentryPythonDeleteTestFunctions` -This function also emits Sentry Metrics and Sentry Crons checkins to the `sentry-python` project in the `Sentry SDKs` organisation on Sentry.io: -https://sentry-sdks.sentry.io/projects/sentry-python/?project=5461230 \ No newline at end of file +This function also emits Sentry Crons checkins to the `sentry-python` project in the `Sentry SDKs` organisation on Sentry.io: +https://sentry-sdks.sentry.io/projects/sentry-python/?project=5461230 diff --git a/scripts/aws_lambda_functions/sentryPythonDeleteTestFunctions/lambda_function.py b/scripts/aws_lambda_functions/sentryPythonDeleteTestFunctions/lambda_function.py index ce7afb6aa4..c365ec2aca 100644 --- a/scripts/aws_lambda_functions/sentryPythonDeleteTestFunctions/lambda_function.py +++ b/scripts/aws_lambda_functions/sentryPythonDeleteTestFunctions/lambda_function.py @@ -44,11 +44,6 @@ def delete_lambda_functions(prefix="test_"): def lambda_handler(event, context): functions_deleted = delete_lambda_functions() - sentry_sdk.metrics.gauge( - key="num_aws_functions_deleted", - value=functions_deleted, - ) - return { "statusCode": 200, "body": f"{functions_deleted} AWS Lambda functions deleted successfully.", diff --git a/sentry_sdk/_types.py b/sentry_sdk/_types.py index 4e3c195cc6..0ee3921862 100644 --- a/sentry_sdk/_types.py +++ b/sentry_sdk/_types.py @@ -17,7 +17,6 @@ from typing import Mapping from typing import NotRequired from typing import Optional - from typing import Tuple from typing import Type from typing import Union from typing_extensions import Literal, TypedDict @@ -118,7 +117,6 @@ class SDKInfo(TypedDict): "transaction_info": Mapping[str, Any], # TODO: We can expand on this type "type": Literal["check_in", "transaction"], "user": dict[str, object], - "_metrics_summary": dict[str, object], }, total=False, ) @@ -156,7 +154,6 @@ class SDKInfo(TypedDict): "internal", "profile", "profile_chunk", - "metric_bucket", "monitor", "span", ] @@ -165,26 +162,6 @@ class SDKInfo(TypedDict): ContinuousProfilerMode = Literal["thread", "gevent", "unknown"] ProfilerMode = Union[ContinuousProfilerMode, Literal["sleep"]] - # Type of the metric. - MetricType = Literal["d", "s", "g", "c"] - - # Value of the metric. - MetricValue = Union[int, float, str] - - # Internal representation of tags as a tuple of tuples (this is done in order to allow for the same key to exist - # multiple times). - MetricTagsInternal = Tuple[Tuple[str, str], ...] - - # External representation of tags as a dictionary. - MetricTagValue = Union[str, int, float, None] - MetricTags = Mapping[str, MetricTagValue] - - # Value inside the generator for the metric value. 
- FlushedMetricValue = Union[int, float] - - BucketKey = Tuple[MetricType, str, MeasurementUnit, MetricTagsInternal] - MetricMetaKey = Tuple[MetricType, str, MeasurementUnit] - MonitorConfigScheduleType = Literal["crontab", "interval"] MonitorConfigScheduleUnit = Literal[ "year", diff --git a/sentry_sdk/client.py b/sentry_sdk/client.py index 04737de981..90b212275b 100644 --- a/sentry_sdk/client.py +++ b/sentry_sdk/client.py @@ -55,7 +55,6 @@ from sentry_sdk._types import Event, Hint, SDKInfo from sentry_sdk.integrations import Integration - from sentry_sdk.metrics import MetricsAggregator from sentry_sdk.scope import Scope from sentry_sdk.session import Session from sentry_sdk.transport import Transport @@ -146,7 +145,6 @@ def __init__(self, options=None): self.transport = None # type: Optional[Transport] self.monitor = None # type: Optional[Monitor] - self.metrics_aggregator = None # type: Optional[MetricsAggregator] def __getstate__(self, *args, **kwargs): # type: (*Any, **Any) -> Any @@ -308,18 +306,6 @@ def _capture_envelope(envelope): self.session_flusher = SessionFlusher(capture_func=_capture_envelope) - self.metrics_aggregator = None # type: Optional[MetricsAggregator] - experiments = self.options.get("_experiments", {}) - if experiments.get("enable_metrics", True): - from sentry_sdk.metrics import MetricsAggregator - - self.metrics_aggregator = MetricsAggregator( - capture_func=_capture_envelope, - enable_code_locations=bool( - experiments.get("metric_code_locations", True) - ), - ) - max_request_body_size = ("always", "never", "small", "medium") if self.options["max_request_body_size"] not in max_request_body_size: raise ValueError( @@ -377,7 +363,6 @@ def _capture_envelope(envelope): if ( self.monitor - or self.metrics_aggregator or has_profiling_enabled(self.options) or isinstance(self.transport, HttpTransport) ): @@ -810,8 +795,6 @@ def close( if self.transport is not None: self.flush(timeout=timeout, callback=callback) self.session_flusher.kill() - if self.metrics_aggregator is not None: - self.metrics_aggregator.kill() if self.monitor: self.monitor.kill() self.transport.kill() @@ -834,8 +817,6 @@ def flush( if timeout is None: timeout = self.options["shutdown_timeout"] self.session_flusher.flush() - if self.metrics_aggregator is not None: - self.metrics_aggregator.flush() self.transport.flush(timeout=timeout, callback=callback) def __enter__(self): diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index 3178b2b379..8ded9f5937 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -38,12 +38,9 @@ class EndpointType(Enum): Event, EventProcessor, Hint, - MeasurementUnit, ProfilerMode, TracesSampler, TransactionProcessor, - MetricTags, - MetricValue, ) # Experiments are feature flags to enable and disable certain unstable SDK @@ -61,11 +58,6 @@ class EndpointType(Enum): "otel_powered_performance": Optional[bool], "transport_zlib_compression_level": Optional[int], "transport_num_pools": Optional[int], - "enable_metrics": Optional[bool], - "before_emit_metric": Optional[ - Callable[[str, MetricValue, MeasurementUnit, MetricTags], bool] - ], - "metric_code_locations": Optional[bool], }, total=False, ) diff --git a/sentry_sdk/envelope.py b/sentry_sdk/envelope.py index 760116daa1..2bad3db700 100644 --- a/sentry_sdk/envelope.py +++ b/sentry_sdk/envelope.py @@ -274,8 +274,6 @@ def data_category(self): return "profile" elif ty == "profile_chunk": return "profile_chunk" - elif ty == "statsd": - return "metric_bucket" elif ty == "check_in": return "monitor" else: @@ -335,7 
+333,7 @@ def deserialize_from( # if no length was specified we need to read up to the end of line # and remove it (if it is present, i.e. not the very last char in an eof terminated envelope) payload = f.readline().rstrip(b"\n") - if headers.get("type") in ("event", "transaction", "metric_buckets"): + if headers.get("type") in ("event", "transaction"): rv = cls(headers=headers, payload=PayloadRef(json=parse_json(payload))) else: rv = cls(headers=headers, payload=payload) diff --git a/sentry_sdk/metrics.py b/sentry_sdk/metrics.py deleted file mode 100644 index 08aedc71d0..0000000000 --- a/sentry_sdk/metrics.py +++ /dev/null @@ -1,969 +0,0 @@ -import io -import os -import random -import re -import sys -import threading -import time -import warnings -import zlib -from abc import ABC, abstractmethod -from contextlib import contextmanager -from datetime import datetime, timezone -from functools import wraps, partial - -import sentry_sdk -from sentry_sdk.utils import ( - ContextVar, - now, - to_timestamp, - serialize_frame, - json_dumps, -) -from sentry_sdk.envelope import Envelope, Item -from sentry_sdk.tracing import ( - TRANSACTION_SOURCE_ROUTE, - TRANSACTION_SOURCE_VIEW, - TRANSACTION_SOURCE_COMPONENT, - TRANSACTION_SOURCE_TASK, -) - -from typing import TYPE_CHECKING - -if TYPE_CHECKING: - from typing import Any - from typing import Callable - from typing import Dict - from typing import Generator - from typing import Iterable - from typing import List - from typing import Optional - from typing import Set - from typing import Tuple - from typing import Union - - from sentry_sdk._types import BucketKey - from sentry_sdk._types import DurationUnit - from sentry_sdk._types import FlushedMetricValue - from sentry_sdk._types import MeasurementUnit - from sentry_sdk._types import MetricMetaKey - from sentry_sdk._types import MetricTagValue - from sentry_sdk._types import MetricTags - from sentry_sdk._types import MetricTagsInternal - from sentry_sdk._types import MetricType - from sentry_sdk._types import MetricValue - - -warnings.warn( - "The sentry_sdk.metrics module is deprecated and will be removed in the next major release. " - "Sentry will reject all metrics sent after October 7, 2024. 
" - "Learn more: https://sentry.zendesk.com/hc/en-us/articles/26369339769883-Upcoming-API-Changes-to-Metrics", - DeprecationWarning, - stacklevel=2, -) - -_in_metrics = ContextVar("in_metrics", default=False) -_set = set # set is shadowed below - -GOOD_TRANSACTION_SOURCES = frozenset( - [ - TRANSACTION_SOURCE_ROUTE, - TRANSACTION_SOURCE_VIEW, - TRANSACTION_SOURCE_COMPONENT, - TRANSACTION_SOURCE_TASK, - ] -) - -_sanitize_unit = partial(re.compile(r"[^a-zA-Z0-9_]+").sub, "") -_sanitize_metric_key = partial(re.compile(r"[^a-zA-Z0-9_\-.]+").sub, "_") -_sanitize_tag_key = partial(re.compile(r"[^a-zA-Z0-9_\-.\/]+").sub, "") - - -def _sanitize_tag_value(value): - # type: (str) -> str - table = str.maketrans( - { - "\n": "\\n", - "\r": "\\r", - "\t": "\\t", - "\\": "\\\\", - "|": "\\u{7c}", - ",": "\\u{2c}", - } - ) - return value.translate(table) - - -def get_code_location(stacklevel): - # type: (int) -> Optional[Dict[str, Any]] - try: - frm = sys._getframe(stacklevel) - except Exception: - return None - - return serialize_frame( - frm, include_local_variables=False, include_source_context=True - ) - - -@contextmanager -def recursion_protection(): - # type: () -> Generator[bool, None, None] - """Enters recursion protection and returns the old flag.""" - old_in_metrics = _in_metrics.get() - _in_metrics.set(True) - try: - yield old_in_metrics - finally: - _in_metrics.set(old_in_metrics) - - -def metrics_noop(func): - # type: (Any) -> Any - """Convenient decorator that uses `recursion_protection` to - make a function a noop. - """ - - @wraps(func) - def new_func(*args, **kwargs): - # type: (*Any, **Any) -> Any - with recursion_protection() as in_metrics: - if not in_metrics: - return func(*args, **kwargs) - - return new_func - - -class Metric(ABC): - __slots__ = () - - @abstractmethod - def __init__(self, first): - # type: (MetricValue) -> None - pass - - @property - @abstractmethod - def weight(self): - # type: () -> int - pass - - @abstractmethod - def add(self, value): - # type: (MetricValue) -> None - pass - - @abstractmethod - def serialize_value(self): - # type: () -> Iterable[FlushedMetricValue] - pass - - -class CounterMetric(Metric): - __slots__ = ("value",) - - def __init__( - self, first # type: MetricValue - ): - # type: (...) -> None - self.value = float(first) - - @property - def weight(self): - # type: (...) -> int - return 1 - - def add( - self, value # type: MetricValue - ): - # type: (...) -> None - self.value += float(value) - - def serialize_value(self): - # type: (...) -> Iterable[FlushedMetricValue] - return (self.value,) - - -class GaugeMetric(Metric): - __slots__ = ( - "last", - "min", - "max", - "sum", - "count", - ) - - def __init__( - self, first # type: MetricValue - ): - # type: (...) -> None - first = float(first) - self.last = first - self.min = first - self.max = first - self.sum = first - self.count = 1 - - @property - def weight(self): - # type: (...) -> int - # Number of elements. - return 5 - - def add( - self, value # type: MetricValue - ): - # type: (...) -> None - value = float(value) - self.last = value - self.min = min(self.min, value) - self.max = max(self.max, value) - self.sum += value - self.count += 1 - - def serialize_value(self): - # type: (...) -> Iterable[FlushedMetricValue] - return ( - self.last, - self.min, - self.max, - self.sum, - self.count, - ) - - -class DistributionMetric(Metric): - __slots__ = ("value",) - - def __init__( - self, first # type: MetricValue - ): - # type(...) 
-> None - self.value = [float(first)] - - @property - def weight(self): - # type: (...) -> int - return len(self.value) - - def add( - self, value # type: MetricValue - ): - # type: (...) -> None - self.value.append(float(value)) - - def serialize_value(self): - # type: (...) -> Iterable[FlushedMetricValue] - return self.value - - -class SetMetric(Metric): - __slots__ = ("value",) - - def __init__( - self, first # type: MetricValue - ): - # type: (...) -> None - self.value = {first} - - @property - def weight(self): - # type: (...) -> int - return len(self.value) - - def add( - self, value # type: MetricValue - ): - # type: (...) -> None - self.value.add(value) - - def serialize_value(self): - # type: (...) -> Iterable[FlushedMetricValue] - def _hash(x): - # type: (MetricValue) -> int - if isinstance(x, str): - return zlib.crc32(x.encode("utf-8")) & 0xFFFFFFFF - return int(x) - - return (_hash(value) for value in self.value) - - -def _encode_metrics(flushable_buckets): - # type: (Iterable[Tuple[int, Dict[BucketKey, Metric]]]) -> bytes - out = io.BytesIO() - _write = out.write - - # Note on sanitization: we intentionally sanitize in emission (serialization) - # and not during aggregation for performance reasons. This means that the - # envelope can in fact have duplicate buckets stored. This is acceptable for - # relay side emission and should not happen commonly. - - for timestamp, buckets in flushable_buckets: - for bucket_key, metric in buckets.items(): - metric_type, metric_name, metric_unit, metric_tags = bucket_key - metric_name = _sanitize_metric_key(metric_name) - metric_unit = _sanitize_unit(metric_unit) - _write(metric_name.encode("utf-8")) - _write(b"@") - _write(metric_unit.encode("utf-8")) - - for serialized_value in metric.serialize_value(): - _write(b":") - _write(str(serialized_value).encode("utf-8")) - - _write(b"|") - _write(metric_type.encode("ascii")) - - if metric_tags: - _write(b"|#") - first = True - for tag_key, tag_value in metric_tags: - tag_key = _sanitize_tag_key(tag_key) - if not tag_key: - continue - if first: - first = False - else: - _write(b",") - _write(tag_key.encode("utf-8")) - _write(b":") - _write(_sanitize_tag_value(tag_value).encode("utf-8")) - - _write(b"|T") - _write(str(timestamp).encode("ascii")) - _write(b"\n") - - return out.getvalue() - - -def _encode_locations(timestamp, code_locations): - # type: (int, Iterable[Tuple[MetricMetaKey, Dict[str, Any]]]) -> bytes - mapping = {} # type: Dict[str, List[Any]] - - for key, loc in code_locations: - metric_type, name, unit = key - mri = "{}:{}@{}".format( - metric_type, _sanitize_metric_key(name), _sanitize_unit(unit) - ) - - loc["type"] = "location" - mapping.setdefault(mri, []).append(loc) - - return json_dumps({"timestamp": timestamp, "mapping": mapping}) - - -METRIC_TYPES = { - "c": CounterMetric, - "g": GaugeMetric, - "d": DistributionMetric, - "s": SetMetric, -} # type: dict[MetricType, type[Metric]] - -# some of these are dumb -TIMING_FUNCTIONS = { - "nanosecond": time.perf_counter_ns, - "microsecond": lambda: time.perf_counter_ns() / 1000.0, - "millisecond": lambda: time.perf_counter_ns() / 1000000.0, - "second": now, - "minute": lambda: now() / 60.0, - "hour": lambda: now() / 3600.0, - "day": lambda: now() / 3600.0 / 24.0, - "week": lambda: now() / 3600.0 / 24.0 / 7.0, -} - - -class LocalAggregator: - __slots__ = ("_measurements",) - - def __init__(self): - # type: (...) 
-> None - self._measurements = ( - {} - ) # type: Dict[Tuple[str, MetricTagsInternal], Tuple[float, float, int, float]] - - def add( - self, - ty, # type: MetricType - key, # type: str - value, # type: float - unit, # type: MeasurementUnit - tags, # type: MetricTagsInternal - ): - # type: (...) -> None - export_key = "%s:%s@%s" % (ty, key, unit) - bucket_key = (export_key, tags) - - old = self._measurements.get(bucket_key) - if old is not None: - v_min, v_max, v_count, v_sum = old - v_min = min(v_min, value) - v_max = max(v_max, value) - v_count += 1 - v_sum += value - else: - v_min = v_max = v_sum = value - v_count = 1 - self._measurements[bucket_key] = (v_min, v_max, v_count, v_sum) - - def to_json(self): - # type: (...) -> Dict[str, Any] - rv = {} # type: Any - for (export_key, tags), ( - v_min, - v_max, - v_count, - v_sum, - ) in self._measurements.items(): - rv.setdefault(export_key, []).append( - { - "tags": _tags_to_dict(tags), - "min": v_min, - "max": v_max, - "count": v_count, - "sum": v_sum, - } - ) - return rv - - -class MetricsAggregator: - ROLLUP_IN_SECONDS = 10.0 - MAX_WEIGHT = 100000 - FLUSHER_SLEEP_TIME = 5.0 - - def __init__( - self, - capture_func, # type: Callable[[Envelope], None] - enable_code_locations=False, # type: bool - ): - # type: (...) -> None - self.buckets = {} # type: Dict[int, Any] - self._enable_code_locations = enable_code_locations - self._seen_locations = _set() # type: Set[Tuple[int, MetricMetaKey]] - self._pending_locations = {} # type: Dict[int, List[Tuple[MetricMetaKey, Any]]] - self._buckets_total_weight = 0 - self._capture_func = capture_func - self._running = True - self._lock = threading.Lock() - - self._flush_event = threading.Event() # type: threading.Event - self._force_flush = False - - # The aggregator shifts its flushing by up to an entire rollup window to - # avoid multiple clients trampling on end of a 10 second window as all the - # buckets are anchored to multiples of ROLLUP seconds. We randomize this - # number once per aggregator boot to achieve some level of offsetting - # across a fleet of deployed SDKs. Relay itself will also apply independent - # jittering. - self._flush_shift = random.random() * self.ROLLUP_IN_SECONDS - - self._flusher = None # type: Optional[threading.Thread] - self._flusher_pid = None # type: Optional[int] - - def _ensure_thread(self): - # type: (...) -> bool - """For forking processes we might need to restart this thread. - This ensures that our process actually has that thread running. - """ - if not self._running: - return False - - pid = os.getpid() - if self._flusher_pid == pid: - return True - - with self._lock: - # Recheck to make sure another thread didn't get here and start the - # the flusher in the meantime - if self._flusher_pid == pid: - return True - - self._flusher_pid = pid - - self._flusher = threading.Thread(target=self._flush_loop) - self._flusher.daemon = True - - try: - self._flusher.start() - except RuntimeError: - # Unfortunately at this point the interpreter is in a state that no - # longer allows us to spawn a thread and we have to bail. - self._running = False - return False - - return True - - def _flush_loop(self): - # type: (...) -> None - _in_metrics.set(True) - while self._running or self._force_flush: - if self._running: - self._flush_event.wait(self.FLUSHER_SLEEP_TIME) - self._flush() - - def _flush(self): - # type: (...) -> None - self._emit(self._flushable_buckets(), self._flushable_locations()) - - def _flushable_buckets(self): - # type: (...) 
-> (Iterable[Tuple[int, Dict[BucketKey, Metric]]]) - with self._lock: - force_flush = self._force_flush - cutoff = time.time() - self.ROLLUP_IN_SECONDS - self._flush_shift - flushable_buckets = () # type: Iterable[Tuple[int, Dict[BucketKey, Metric]]] - weight_to_remove = 0 - - if force_flush: - flushable_buckets = self.buckets.items() - self.buckets = {} - self._buckets_total_weight = 0 - self._force_flush = False - else: - flushable_buckets = [] - for buckets_timestamp, buckets in self.buckets.items(): - # If the timestamp of the bucket is newer that the rollup we want to skip it. - if buckets_timestamp <= cutoff: - flushable_buckets.append((buckets_timestamp, buckets)) - - # We will clear the elements while holding the lock, in order to avoid requesting it downstream again. - for buckets_timestamp, buckets in flushable_buckets: - for metric in buckets.values(): - weight_to_remove += metric.weight - del self.buckets[buckets_timestamp] - - self._buckets_total_weight -= weight_to_remove - - return flushable_buckets - - def _flushable_locations(self): - # type: (...) -> Dict[int, List[Tuple[MetricMetaKey, Dict[str, Any]]]] - with self._lock: - locations = self._pending_locations - self._pending_locations = {} - return locations - - @metrics_noop - def add( - self, - ty, # type: MetricType - key, # type: str - value, # type: MetricValue - unit, # type: MeasurementUnit - tags, # type: Optional[MetricTags] - timestamp=None, # type: Optional[Union[float, datetime]] - local_aggregator=None, # type: Optional[LocalAggregator] - stacklevel=0, # type: Optional[int] - ): - # type: (...) -> None - if not self._ensure_thread() or self._flusher is None: - return None - - if timestamp is None: - timestamp = time.time() - elif isinstance(timestamp, datetime): - timestamp = to_timestamp(timestamp) - - bucket_timestamp = int( - (timestamp // self.ROLLUP_IN_SECONDS) * self.ROLLUP_IN_SECONDS - ) - serialized_tags = _serialize_tags(tags) - bucket_key = ( - ty, - key, - unit, - serialized_tags, - ) - - with self._lock: - local_buckets = self.buckets.setdefault(bucket_timestamp, {}) - metric = local_buckets.get(bucket_key) - if metric is not None: - previous_weight = metric.weight - metric.add(value) - else: - metric = local_buckets[bucket_key] = METRIC_TYPES[ty](value) - previous_weight = 0 - - added = metric.weight - previous_weight - - if stacklevel is not None: - self.record_code_location(ty, key, unit, stacklevel + 2, timestamp) - - # Given the new weight we consider whether we want to force flush. - self._consider_force_flush() - - # For sets, we only record that a value has been added to the set but not which one. - # See develop docs: https://develop.sentry.dev/sdk/metrics/#sets - if local_aggregator is not None: - local_value = float(added if ty == "s" else value) - local_aggregator.add(ty, key, local_value, unit, serialized_tags) - - def record_code_location( - self, - ty, # type: MetricType - key, # type: str - unit, # type: MeasurementUnit - stacklevel, # type: int - timestamp=None, # type: Optional[float] - ): - # type: (...) 
-> None - if not self._enable_code_locations: - return - if timestamp is None: - timestamp = time.time() - meta_key = (ty, key, unit) - start_of_day = datetime.fromtimestamp(timestamp, timezone.utc).replace( - hour=0, minute=0, second=0, microsecond=0, tzinfo=None - ) - start_of_day = int(to_timestamp(start_of_day)) - - if (start_of_day, meta_key) not in self._seen_locations: - self._seen_locations.add((start_of_day, meta_key)) - loc = get_code_location(stacklevel + 3) - if loc is not None: - # Group metadata by day to make flushing more efficient. - # There needs to be one envelope item per timestamp. - self._pending_locations.setdefault(start_of_day, []).append( - (meta_key, loc) - ) - - @metrics_noop - def need_code_location( - self, - ty, # type: MetricType - key, # type: str - unit, # type: MeasurementUnit - timestamp, # type: float - ): - # type: (...) -> bool - if self._enable_code_locations: - return False - meta_key = (ty, key, unit) - start_of_day = datetime.fromtimestamp(timestamp, timezone.utc).replace( - hour=0, minute=0, second=0, microsecond=0, tzinfo=None - ) - start_of_day = int(to_timestamp(start_of_day)) - return (start_of_day, meta_key) not in self._seen_locations - - def kill(self): - # type: (...) -> None - if self._flusher is None: - return - - self._running = False - self._flush_event.set() - self._flusher = None - - @metrics_noop - def flush(self): - # type: (...) -> None - self._force_flush = True - self._flush() - - def _consider_force_flush(self): - # type: (...) -> None - # It's important to acquire a lock around this method, since it will touch shared data structures. - total_weight = len(self.buckets) + self._buckets_total_weight - if total_weight >= self.MAX_WEIGHT: - self._force_flush = True - self._flush_event.set() - - def _emit( - self, - flushable_buckets, # type: (Iterable[Tuple[int, Dict[BucketKey, Metric]]]) - code_locations, # type: Dict[int, List[Tuple[MetricMetaKey, Dict[str, Any]]]] - ): - # type: (...) -> Optional[Envelope] - envelope = Envelope() - - if flushable_buckets: - encoded_metrics = _encode_metrics(flushable_buckets) - envelope.add_item(Item(payload=encoded_metrics, type="statsd")) - - for timestamp, locations in code_locations.items(): - encoded_locations = _encode_locations(timestamp, locations) - envelope.add_item(Item(payload=encoded_locations, type="metric_meta")) - - if envelope.items: - self._capture_func(envelope) - return envelope - return None - - -def _serialize_tags( - tags, # type: Optional[MetricTags] -): - # type: (...) -> MetricTagsInternal - if not tags: - return () - - rv = [] - for key, value in tags.items(): - # If the value is a collection, we want to flatten it. - if isinstance(value, (list, tuple)): - for inner_value in value: - if inner_value is not None: - rv.append((key, str(inner_value))) - elif value is not None: - rv.append((key, str(value))) - - # It's very important to sort the tags in order to obtain the - # same bucket key. 
- return tuple(sorted(rv)) - - -def _tags_to_dict(tags): - # type: (MetricTagsInternal) -> Dict[str, Any] - rv = {} # type: Dict[str, Any] - for tag_name, tag_value in tags: - old_value = rv.get(tag_name) - if old_value is not None: - if isinstance(old_value, list): - old_value.append(tag_value) - else: - rv[tag_name] = [old_value, tag_value] - else: - rv[tag_name] = tag_value - return rv - - -def _get_aggregator(): - # type: () -> Optional[MetricsAggregator] - client = sentry_sdk.get_client() - return ( - client.metrics_aggregator - if client.is_active() and client.metrics_aggregator is not None - else None - ) - - -def _get_aggregator_and_update_tags(key, value, unit, tags): - # type: (str, Optional[MetricValue], MeasurementUnit, Optional[MetricTags]) -> Tuple[Optional[MetricsAggregator], Optional[LocalAggregator], Optional[MetricTags]] - client = sentry_sdk.get_client() - if not client.is_active() or client.metrics_aggregator is None: - return None, None, tags - - updated_tags = dict(tags or ()) # type: Dict[str, MetricTagValue] - updated_tags.setdefault("release", client.options["release"]) - updated_tags.setdefault("environment", client.options["environment"]) - - scope = sentry_sdk.get_current_scope() - local_aggregator = None - - # We go with the low-level API here to access transaction information as - # this one is the same between just errors and errors + performance - transaction_source = scope._transaction_info.get("source") - if transaction_source in GOOD_TRANSACTION_SOURCES: - transaction_name = scope._transaction - if transaction_name: - updated_tags.setdefault("transaction", transaction_name) - if scope._span is not None: - local_aggregator = scope._span._get_local_aggregator() - - experiments = client.options.get("_experiments", {}) - before_emit_callback = experiments.get("before_emit_metric") - if before_emit_callback is not None: - with recursion_protection() as in_metrics: - if not in_metrics: - if not before_emit_callback(key, value, unit, updated_tags): - return None, None, updated_tags - - return client.metrics_aggregator, local_aggregator, updated_tags - - -def increment( - key, # type: str - value=1.0, # type: float - unit="none", # type: MeasurementUnit - tags=None, # type: Optional[MetricTags] - timestamp=None, # type: Optional[Union[float, datetime]] - stacklevel=0, # type: int -): - # type: (...) -> None - """Increments a counter.""" - aggregator, local_aggregator, tags = _get_aggregator_and_update_tags( - key, value, unit, tags - ) - if aggregator is not None: - aggregator.add( - "c", key, value, unit, tags, timestamp, local_aggregator, stacklevel - ) - - -# alias as incr is relatively common in python -incr = increment - - -class _Timing: - def __init__( - self, - key, # type: str - tags, # type: Optional[MetricTags] - timestamp, # type: Optional[Union[float, datetime]] - value, # type: Optional[float] - unit, # type: DurationUnit - stacklevel, # type: int - ): - # type: (...) -> None - self.key = key - self.tags = tags - self.timestamp = timestamp - self.value = value - self.unit = unit - self.entered = None # type: Optional[float] - self._span = None # type: Optional[sentry_sdk.tracing.Span] - self.stacklevel = stacklevel - - def _validate_invocation(self, context): - # type: (str) -> None - if self.value is not None: - raise TypeError( - "cannot use timing as %s when a value is provided" % context - ) - - def __enter__(self): - # type: (...) 
-> _Timing - self.entered = TIMING_FUNCTIONS[self.unit]() - self._validate_invocation("context-manager") - self._span = sentry_sdk.start_span(op="metric.timing", description=self.key) - if self.tags: - for key, value in self.tags.items(): - if isinstance(value, (tuple, list)): - value = ",".join(sorted(map(str, value))) - self._span.set_tag(key, value) - self._span.__enter__() - - # report code locations here for better accuracy - aggregator = _get_aggregator() - if aggregator is not None: - aggregator.record_code_location("d", self.key, self.unit, self.stacklevel) - - return self - - def __exit__(self, exc_type, exc_value, tb): - # type: (Any, Any, Any) -> None - assert self._span, "did not enter" - aggregator, local_aggregator, tags = _get_aggregator_and_update_tags( - self.key, - self.value, - self.unit, - self.tags, - ) - if aggregator is not None: - elapsed = TIMING_FUNCTIONS[self.unit]() - self.entered # type: ignore - aggregator.add( - "d", - self.key, - elapsed, - self.unit, - tags, - self.timestamp, - local_aggregator, - None, # code locations are reported in __enter__ - ) - - self._span.__exit__(exc_type, exc_value, tb) - self._span = None - - def __call__(self, f): - # type: (Any) -> Any - self._validate_invocation("decorator") - - @wraps(f) - def timed_func(*args, **kwargs): - # type: (*Any, **Any) -> Any - with timing( - key=self.key, - tags=self.tags, - timestamp=self.timestamp, - unit=self.unit, - stacklevel=self.stacklevel + 1, - ): - return f(*args, **kwargs) - - return timed_func - - -def timing( - key, # type: str - value=None, # type: Optional[float] - unit="second", # type: DurationUnit - tags=None, # type: Optional[MetricTags] - timestamp=None, # type: Optional[Union[float, datetime]] - stacklevel=0, # type: int -): - # type: (...) -> _Timing - """Emits a distribution with the time it takes to run the given code block. - - This method supports three forms of invocation: - - - when a `value` is provided, it functions similar to `distribution` but with - - it can be used as a context manager - - it can be used as a decorator - """ - if value is not None: - aggregator, local_aggregator, tags = _get_aggregator_and_update_tags( - key, value, unit, tags - ) - if aggregator is not None: - aggregator.add( - "d", key, value, unit, tags, timestamp, local_aggregator, stacklevel - ) - return _Timing(key, tags, timestamp, value, unit, stacklevel) - - -def distribution( - key, # type: str - value, # type: float - unit="none", # type: MeasurementUnit - tags=None, # type: Optional[MetricTags] - timestamp=None, # type: Optional[Union[float, datetime]] - stacklevel=0, # type: int -): - # type: (...) -> None - """Emits a distribution.""" - aggregator, local_aggregator, tags = _get_aggregator_and_update_tags( - key, value, unit, tags - ) - if aggregator is not None: - aggregator.add( - "d", key, value, unit, tags, timestamp, local_aggregator, stacklevel - ) - - -def set( - key, # type: str - value, # type: Union[int, str] - unit="none", # type: MeasurementUnit - tags=None, # type: Optional[MetricTags] - timestamp=None, # type: Optional[Union[float, datetime]] - stacklevel=0, # type: int -): - # type: (...) 
-> None - """Emits a set.""" - aggregator, local_aggregator, tags = _get_aggregator_and_update_tags( - key, value, unit, tags - ) - if aggregator is not None: - aggregator.add( - "s", key, value, unit, tags, timestamp, local_aggregator, stacklevel - ) - - -def gauge( - key, # type: str - value, # type: float - unit="none", # type: MeasurementUnit - tags=None, # type: Optional[MetricTags] - timestamp=None, # type: Optional[Union[float, datetime]] - stacklevel=0, # type: int -): - # type: (...) -> None - """Emits a gauge.""" - aggregator, local_aggregator, tags = _get_aggregator_and_update_tags( - key, value, unit, tags - ) - if aggregator is not None: - aggregator.add( - "g", key, value, unit, tags, timestamp, local_aggregator, stacklevel - ) diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 7daeb9de23..270dd9d60f 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -1,7 +1,6 @@ import uuid import random import time -import warnings from datetime import datetime, timedelta, timezone from opentelemetry import trace as otel_trace, context @@ -259,7 +258,6 @@ class Span: "_span_recorder", "_context_manager_state", "_containing_transaction", - "_local_aggregator", "scope", "origin", ) @@ -311,7 +309,6 @@ def __init__( self.timestamp = None # type: Optional[datetime] self._span_recorder = None # type: Optional[_SpanRecorder] - self._local_aggregator = None # type: Optional[LocalAggregator] thread_id, thread_name = get_current_thread_meta() self.set_thread(thread_id, thread_name) @@ -324,13 +321,6 @@ def init_span_recorder(self, maxlen): if self._span_recorder is None: self._span_recorder = _SpanRecorder(maxlen) - def _get_local_aggregator(self): - # type: (...) -> LocalAggregator - rv = self._local_aggregator - if rv is None: - rv = self._local_aggregator = LocalAggregator() - return rv - def __repr__(self): # type: () -> str return ( @@ -643,11 +633,6 @@ def to_json(self): if self.status: self._tags["status"] = self.status - if self._local_aggregator is not None: - metrics_summary = self._local_aggregator.to_json() - if metrics_summary: - rv["_metrics_summary"] = metrics_summary - if len(self._measurements) > 0: rv["measurements"] = self._measurements @@ -926,13 +911,6 @@ def finish( event["measurements"] = self._measurements - # This is here since `to_json` is not invoked. This really should - # be gone when we switch to onlyspans. - if self._local_aggregator is not None: - metrics_summary = self._local_aggregator.to_json() - if metrics_summary: - event["_metrics_summary"] = metrics_summary - return scope.capture_event(event) def set_measurement(self, name, value, unit=""): @@ -1606,8 +1584,3 @@ async def my_async_function(): has_tracing_enabled, maybe_create_breadcrumbs_from_span, ) - -with warnings.catch_warnings(): - # The code in this file which uses `LocalAggregator` is only called from the deprecated `metrics` module. 
- warnings.simplefilter("ignore", DeprecationWarning) - from sentry_sdk.metrics import LocalAggregator diff --git a/sentry_sdk/transport.py b/sentry_sdk/transport.py index 3f03a5fb1f..c837e735a1 100644 --- a/sentry_sdk/transport.py +++ b/sentry_sdk/transport.py @@ -177,17 +177,7 @@ def _parse_rate_limits(header, now=None): retry_after = now + timedelta(seconds=int(retry_after)) for category in categories and categories.split(";") or (None,): - if category == "metric_bucket": - try: - namespaces = parameters[4].split(";") - except IndexError: - namespaces = [] - - if not namespaces or "custom" in namespaces: - yield category, retry_after - - else: - yield category, retry_after + yield category, retry_after except (LookupError, ValueError): continue @@ -386,12 +376,6 @@ def _check_disabled(self, category): # type: (str) -> bool def _disabled(bucket): # type: (Any) -> bool - - # The envelope item type used for metrics is statsd - # whereas the rate limit category is metric_bucket - if bucket == "statsd": - bucket = "metric_bucket" - ts = self._disabled_until.get(bucket) return ts is not None and ts > datetime.now(timezone.utc) @@ -420,7 +404,7 @@ def _send_envelope( new_items = [] for item in envelope.items: if self._check_disabled(item.data_category): - if item.data_category in ("transaction", "error", "default", "statsd"): + if item.data_category in ("transaction", "error", "default"): self.on_dropped_event("self_rate_limits") self.record_lost_event("ratelimit_backoff", item=item) else: diff --git a/tests/test_metrics.py b/tests/test_metrics.py deleted file mode 100644 index ece17779d7..0000000000 --- a/tests/test_metrics.py +++ /dev/null @@ -1,945 +0,0 @@ -import sys -import time -import linecache -from unittest import mock - -import pytest - -import sentry_sdk -from sentry_sdk import metrics -from sentry_sdk.tracing import TRANSACTION_SOURCE_ROUTE -from sentry_sdk.envelope import parse_json - -try: - import gevent -except ImportError: - gevent = None - - -minimum_python_37_with_gevent = pytest.mark.skipif( - gevent and sys.version_info < (3, 7), - reason="Require Python 3.7 or higher with gevent", -) - - -def parse_metrics(bytes): - rv = [] - for line in bytes.splitlines(): - pieces = line.decode("utf-8").split("|") - payload = pieces[0].split(":") - name = payload[0] - values = payload[1:] - ty = pieces[1] - ts = None - tags = {} - for piece in pieces[2:]: - if piece[0] == "#": - for pair in piece[1:].split(","): - k, v = pair.split(":", 1) - old = tags.get(k) - if old is not None: - if isinstance(old, list): - old.append(v) - else: - tags[k] = [old, v] - else: - tags[k] = v - elif piece[0] == "T": - ts = int(piece[1:]) - else: - raise ValueError("unknown piece %r" % (piece,)) - rv.append((ts, name, ty, values, tags)) - rv.sort(key=lambda x: (x[0], x[1], tuple(sorted(tags.items())))) - return rv - - -@minimum_python_37_with_gevent -@pytest.mark.forked -def test_increment(sentry_init, capture_envelopes, maybe_monkeypatched_threading): - sentry_init( - release="fun-release", - environment="not-fun-env", - _experiments={"enable_metrics": True, "metric_code_locations": True}, - ) - ts = time.time() - envelopes = capture_envelopes() - - metrics.increment("foobar", 1.0, tags={"foo": "bar", "blub": "blah"}, timestamp=ts) - # python specific alias - metrics.incr("foobar", 2.0, tags={"foo": "bar", "blub": "blah"}, timestamp=ts) - sentry_sdk.flush() - - (envelope,) = envelopes - statsd_item, meta_item = envelope.items - - assert statsd_item.headers["type"] == "statsd" - m = 
parse_metrics(statsd_item.payload.get_bytes()) - - assert len(m) == 1 - assert m[0][1] == "foobar@none" - assert m[0][2] == "c" - assert m[0][3] == ["3.0"] - assert m[0][4] == { - "blub": "blah", - "foo": "bar", - "release": "fun-release", - "environment": "not-fun-env", - } - - assert meta_item.headers["type"] == "metric_meta" - assert parse_json(meta_item.payload.get_bytes()) == { - "timestamp": mock.ANY, - "mapping": { - "c:foobar@none": [ - { - "type": "location", - "filename": "tests/test_metrics.py", - "abs_path": __file__, - "function": sys._getframe().f_code.co_name, - "module": __name__, - "lineno": mock.ANY, - "pre_context": mock.ANY, - "context_line": mock.ANY, - "post_context": mock.ANY, - } - ] - }, - } - - -@minimum_python_37_with_gevent -@pytest.mark.forked -def test_timing(sentry_init, capture_envelopes, maybe_monkeypatched_threading): - sentry_init( - release="fun-release@1.0.0", - environment="not-fun-env", - _experiments={"enable_metrics": True, "metric_code_locations": True}, - ) - ts = time.time() - envelopes = capture_envelopes() - - with metrics.timing("whatever", tags={"blub": "blah"}, timestamp=ts): - time.sleep(0.1) - sentry_sdk.flush() - - (envelope,) = envelopes - statsd_item, meta_item = envelope.items - - assert statsd_item.headers["type"] == "statsd" - m = parse_metrics(statsd_item.payload.get_bytes()) - - assert len(m) == 1 - assert m[0][1] == "whatever@second" - assert m[0][2] == "d" - assert len(m[0][3]) == 1 - assert float(m[0][3][0]) >= 0.1 - assert m[0][4] == { - "blub": "blah", - "release": "fun-release@1.0.0", - "environment": "not-fun-env", - } - - assert meta_item.headers["type"] == "metric_meta" - json = parse_json(meta_item.payload.get_bytes()) - assert json == { - "timestamp": mock.ANY, - "mapping": { - "d:whatever@second": [ - { - "type": "location", - "filename": "tests/test_metrics.py", - "abs_path": __file__, - "function": sys._getframe().f_code.co_name, - "module": __name__, - "lineno": mock.ANY, - "pre_context": mock.ANY, - "context_line": mock.ANY, - "post_context": mock.ANY, - } - ] - }, - } - - loc = json["mapping"]["d:whatever@second"][0] - line = linecache.getline(loc["abs_path"], loc["lineno"]) - assert ( - line.strip() - == 'with metrics.timing("whatever", tags={"blub": "blah"}, timestamp=ts):' - ) - - -@minimum_python_37_with_gevent -@pytest.mark.forked -def test_timing_decorator( - sentry_init, capture_envelopes, maybe_monkeypatched_threading -): - sentry_init( - release="fun-release@1.0.0", - environment="not-fun-env", - _experiments={"enable_metrics": True, "metric_code_locations": True}, - ) - envelopes = capture_envelopes() - - @metrics.timing("whatever-1", tags={"x": "y"}) - def amazing(): - time.sleep(0.1) - return 42 - - @metrics.timing("whatever-2", tags={"x": "y"}, unit="nanosecond") - def amazing_nano(): - time.sleep(0.01) - return 23 - - assert amazing() == 42 - assert amazing_nano() == 23 - sentry_sdk.flush() - - (envelope,) = envelopes - statsd_item, meta_item = envelope.items - - assert statsd_item.headers["type"] == "statsd" - m = parse_metrics(statsd_item.payload.get_bytes()) - - assert len(m) == 2 - assert m[0][1] == "whatever-1@second" - assert m[0][2] == "d" - assert len(m[0][3]) == 1 - assert float(m[0][3][0]) >= 0.1 - assert m[0][4] == { - "x": "y", - "release": "fun-release@1.0.0", - "environment": "not-fun-env", - } - - assert m[1][1] == "whatever-2@nanosecond" - assert m[1][2] == "d" - assert len(m[1][3]) == 1 - assert float(m[1][3][0]) >= 10000000.0 - assert m[1][4] == { - "x": "y", - "release": 
"fun-release@1.0.0", - "environment": "not-fun-env", - } - - assert meta_item.headers["type"] == "metric_meta" - json = parse_json(meta_item.payload.get_bytes()) - assert json == { - "timestamp": mock.ANY, - "mapping": { - "d:whatever-1@second": [ - { - "type": "location", - "filename": "tests/test_metrics.py", - "abs_path": __file__, - "function": sys._getframe().f_code.co_name, - "module": __name__, - "lineno": mock.ANY, - "pre_context": mock.ANY, - "context_line": mock.ANY, - "post_context": mock.ANY, - } - ], - "d:whatever-2@nanosecond": [ - { - "type": "location", - "filename": "tests/test_metrics.py", - "abs_path": __file__, - "function": sys._getframe().f_code.co_name, - "module": __name__, - "lineno": mock.ANY, - "pre_context": mock.ANY, - "context_line": mock.ANY, - "post_context": mock.ANY, - } - ], - }, - } - - # XXX: this is not the best location. It would probably be better to - # report the location in the function, however that is quite a bit - # tricker to do since we report from outside the function so we really - # only see the callsite. - loc = json["mapping"]["d:whatever-1@second"][0] - line = linecache.getline(loc["abs_path"], loc["lineno"]) - assert line.strip() == "assert amazing() == 42" - - -@minimum_python_37_with_gevent -@pytest.mark.forked -def test_timing_basic(sentry_init, capture_envelopes, maybe_monkeypatched_threading): - sentry_init( - release="fun-release@1.0.0", - environment="not-fun-env", - _experiments={"enable_metrics": True, "metric_code_locations": True}, - ) - ts = time.time() - envelopes = capture_envelopes() - - metrics.timing("timing", 1.0, tags={"a": "b"}, timestamp=ts) - metrics.timing("timing", 2.0, tags={"a": "b"}, timestamp=ts) - metrics.timing("timing", 2.0, tags={"a": "b"}, timestamp=ts) - metrics.timing("timing", 3.0, tags={"a": "b"}, timestamp=ts) - sentry_sdk.flush() - - (envelope,) = envelopes - statsd_item, meta_item = envelope.items - - assert statsd_item.headers["type"] == "statsd" - m = parse_metrics(statsd_item.payload.get_bytes()) - - assert len(m) == 1 - assert m[0][1] == "timing@second" - assert m[0][2] == "d" - assert len(m[0][3]) == 4 - assert sorted(map(float, m[0][3])) == [1.0, 2.0, 2.0, 3.0] - assert m[0][4] == { - "a": "b", - "release": "fun-release@1.0.0", - "environment": "not-fun-env", - } - - assert meta_item.headers["type"] == "metric_meta" - assert parse_json(meta_item.payload.get_bytes()) == { - "timestamp": mock.ANY, - "mapping": { - "d:timing@second": [ - { - "type": "location", - "filename": "tests/test_metrics.py", - "abs_path": __file__, - "function": sys._getframe().f_code.co_name, - "module": __name__, - "lineno": mock.ANY, - "pre_context": mock.ANY, - "context_line": mock.ANY, - "post_context": mock.ANY, - } - ] - }, - } - - -@minimum_python_37_with_gevent -@pytest.mark.forked -def test_distribution(sentry_init, capture_envelopes, maybe_monkeypatched_threading): - sentry_init( - release="fun-release@1.0.0", - environment="not-fun-env", - _experiments={"enable_metrics": True, "metric_code_locations": True}, - ) - ts = time.time() - envelopes = capture_envelopes() - - metrics.distribution("dist", 1.0, tags={"a": "b"}, timestamp=ts) - metrics.distribution("dist", 2.0, tags={"a": "b"}, timestamp=ts) - metrics.distribution("dist", 2.0, tags={"a": "b"}, timestamp=ts) - metrics.distribution("dist", 3.0, tags={"a": "b"}, timestamp=ts) - sentry_sdk.flush() - - (envelope,) = envelopes - statsd_item, meta_item = envelope.items - - assert statsd_item.headers["type"] == "statsd" - m = 
parse_metrics(statsd_item.payload.get_bytes()) - - assert len(m) == 1 - assert m[0][1] == "dist@none" - assert m[0][2] == "d" - assert len(m[0][3]) == 4 - assert sorted(map(float, m[0][3])) == [1.0, 2.0, 2.0, 3.0] - assert m[0][4] == { - "a": "b", - "release": "fun-release@1.0.0", - "environment": "not-fun-env", - } - - assert meta_item.headers["type"] == "metric_meta" - json = parse_json(meta_item.payload.get_bytes()) - assert json == { - "timestamp": mock.ANY, - "mapping": { - "d:dist@none": [ - { - "type": "location", - "filename": "tests/test_metrics.py", - "abs_path": __file__, - "function": sys._getframe().f_code.co_name, - "module": __name__, - "lineno": mock.ANY, - "pre_context": mock.ANY, - "context_line": mock.ANY, - "post_context": mock.ANY, - } - ] - }, - } - - loc = json["mapping"]["d:dist@none"][0] - line = linecache.getline(loc["abs_path"], loc["lineno"]) - assert ( - line.strip() - == 'metrics.distribution("dist", 1.0, tags={"a": "b"}, timestamp=ts)' - ) - - -@minimum_python_37_with_gevent -@pytest.mark.forked -def test_set(sentry_init, capture_envelopes, maybe_monkeypatched_threading): - sentry_init( - release="fun-release@1.0.0", - environment="not-fun-env", - _experiments={"enable_metrics": True, "metric_code_locations": True}, - ) - ts = time.time() - envelopes = capture_envelopes() - - metrics.set("my-set", "peter", tags={"magic": "puff"}, timestamp=ts) - metrics.set("my-set", "paul", tags={"magic": "puff"}, timestamp=ts) - metrics.set("my-set", "mary", tags={"magic": "puff"}, timestamp=ts) - sentry_sdk.flush() - - (envelope,) = envelopes - statsd_item, meta_item = envelope.items - - assert statsd_item.headers["type"] == "statsd" - m = parse_metrics(statsd_item.payload.get_bytes()) - - assert len(m) == 1 - assert m[0][1] == "my-set@none" - assert m[0][2] == "s" - assert len(m[0][3]) == 3 - assert sorted(map(int, m[0][3])) == [354582103, 2513273657, 3329318813] - assert m[0][4] == { - "magic": "puff", - "release": "fun-release@1.0.0", - "environment": "not-fun-env", - } - - assert meta_item.headers["type"] == "metric_meta" - assert parse_json(meta_item.payload.get_bytes()) == { - "timestamp": mock.ANY, - "mapping": { - "s:my-set@none": [ - { - "type": "location", - "filename": "tests/test_metrics.py", - "abs_path": __file__, - "function": sys._getframe().f_code.co_name, - "module": __name__, - "lineno": mock.ANY, - "pre_context": mock.ANY, - "context_line": mock.ANY, - "post_context": mock.ANY, - } - ] - }, - } - - -@minimum_python_37_with_gevent -@pytest.mark.forked -def test_gauge(sentry_init, capture_envelopes, maybe_monkeypatched_threading): - sentry_init( - release="fun-release@1.0.0", - environment="not-fun-env", - _experiments={"enable_metrics": True, "metric_code_locations": False}, - ) - ts = time.time() - envelopes = capture_envelopes() - - metrics.gauge("my-gauge", 10.0, tags={"x": "y"}, timestamp=ts) - metrics.gauge("my-gauge", 20.0, tags={"x": "y"}, timestamp=ts) - metrics.gauge("my-gauge", 30.0, tags={"x": "y"}, timestamp=ts) - sentry_sdk.flush() - - (envelope,) = envelopes - - assert len(envelope.items) == 1 - assert envelope.items[0].headers["type"] == "statsd" - m = parse_metrics(envelope.items[0].payload.get_bytes()) - - assert len(m) == 1 - assert m[0][1] == "my-gauge@none" - assert m[0][2] == "g" - assert len(m[0][3]) == 5 - assert list(map(float, m[0][3])) == [30.0, 10.0, 30.0, 60.0, 3.0] - assert m[0][4] == { - "x": "y", - "release": "fun-release@1.0.0", - "environment": "not-fun-env", - } - - -@minimum_python_37_with_gevent -@pytest.mark.forked 
-def test_multiple(sentry_init, capture_envelopes): - sentry_init( - release="fun-release@1.0.0", - environment="not-fun-env", - _experiments={"enable_metrics": True, "metric_code_locations": False}, - ) - ts = time.time() - envelopes = capture_envelopes() - - metrics.gauge("my-gauge", 10.0, tags={"x": "y"}, timestamp=ts) - metrics.gauge("my-gauge", 20.0, tags={"x": "y"}, timestamp=ts) - metrics.gauge("my-gauge", 30.0, tags={"x": "y"}, timestamp=ts) - for _ in range(10): - metrics.increment("counter-1", 1.0, timestamp=ts) - metrics.increment("counter-2", 1.0, timestamp=ts) - - sentry_sdk.flush() - - (envelope,) = envelopes - - assert len(envelope.items) == 1 - assert envelope.items[0].headers["type"] == "statsd" - m = parse_metrics(envelope.items[0].payload.get_bytes()) - - assert len(m) == 3 - - assert m[0][1] == "counter-1@none" - assert m[0][2] == "c" - assert list(map(float, m[0][3])) == [10.0] - assert m[0][4] == { - "release": "fun-release@1.0.0", - "environment": "not-fun-env", - } - - assert m[1][1] == "counter-2@none" - assert m[1][2] == "c" - assert list(map(float, m[1][3])) == [1.0] - assert m[1][4] == { - "release": "fun-release@1.0.0", - "environment": "not-fun-env", - } - - assert m[2][1] == "my-gauge@none" - assert m[2][2] == "g" - assert len(m[2][3]) == 5 - assert list(map(float, m[2][3])) == [30.0, 10.0, 30.0, 60.0, 3.0] - assert m[2][4] == { - "x": "y", - "release": "fun-release@1.0.0", - "environment": "not-fun-env", - } - - -@minimum_python_37_with_gevent -@pytest.mark.forked -def test_transaction_name( - sentry_init, capture_envelopes, maybe_monkeypatched_threading -): - sentry_init( - release="fun-release@1.0.0", - environment="not-fun-env", - _experiments={"enable_metrics": True, "metric_code_locations": False}, - ) - ts = time.time() - envelopes = capture_envelopes() - - sentry_sdk.get_current_scope().set_transaction_name( - "/user/{user_id}", source="route" - ) - metrics.distribution("dist", 1.0, tags={"a": "b"}, timestamp=ts) - metrics.distribution("dist", 2.0, tags={"a": "b"}, timestamp=ts) - metrics.distribution("dist", 2.0, tags={"a": "b"}, timestamp=ts) - metrics.distribution("dist", 3.0, tags={"a": "b"}, timestamp=ts) - - sentry_sdk.flush() - - (envelope,) = envelopes - - assert len(envelope.items) == 1 - assert envelope.items[0].headers["type"] == "statsd" - m = parse_metrics(envelope.items[0].payload.get_bytes()) - - assert len(m) == 1 - assert m[0][1] == "dist@none" - assert m[0][2] == "d" - assert len(m[0][3]) == 4 - assert sorted(map(float, m[0][3])) == [1.0, 2.0, 2.0, 3.0] - assert m[0][4] == { - "a": "b", - "transaction": "/user/{user_id}", - "release": "fun-release@1.0.0", - "environment": "not-fun-env", - } - - -@minimum_python_37_with_gevent -@pytest.mark.forked -def test_metric_summaries( - sentry_init, capture_envelopes, maybe_monkeypatched_threading -): - sentry_init( - release="fun-release@1.0.0", - environment="not-fun-env", - enable_tracing=True, - ) - ts = time.time() - envelopes = capture_envelopes() - - with sentry_sdk.start_transaction( - op="stuff", name="/foo", source=TRANSACTION_SOURCE_ROUTE - ) as transaction: - metrics.increment("root-counter", timestamp=ts) - with metrics.timing("my-timer-metric", tags={"a": "b"}, timestamp=ts): - for x in range(10): - metrics.distribution("my-dist", float(x), timestamp=ts) - - sentry_sdk.flush() - - (transaction, envelope) = envelopes - - # Metrics Emission - assert envelope.items[0].headers["type"] == "statsd" - m = parse_metrics(envelope.items[0].payload.get_bytes()) - - assert len(m) == 3 - - 
assert m[0][1] == "my-dist@none" - assert m[0][2] == "d" - assert len(m[0][3]) == 10 - assert sorted(m[0][3]) == list(map(str, map(float, range(10)))) - assert m[0][4] == { - "transaction": "/foo", - "release": "fun-release@1.0.0", - "environment": "not-fun-env", - } - - assert m[1][1] == "my-timer-metric@second" - assert m[1][2] == "d" - assert len(m[1][3]) == 1 - assert m[1][4] == { - "a": "b", - "transaction": "/foo", - "release": "fun-release@1.0.0", - "environment": "not-fun-env", - } - - assert m[2][1] == "root-counter@none" - assert m[2][2] == "c" - assert m[2][3] == ["1.0"] - assert m[2][4] == { - "transaction": "/foo", - "release": "fun-release@1.0.0", - "environment": "not-fun-env", - } - - # Measurement Attachment - t = transaction.items[0].get_transaction_event() - - assert t["_metrics_summary"] == { - "c:root-counter@none": [ - { - "count": 1, - "min": 1.0, - "max": 1.0, - "sum": 1.0, - "tags": { - "transaction": "/foo", - "release": "fun-release@1.0.0", - "environment": "not-fun-env", - }, - } - ] - } - - assert t["spans"][0]["_metrics_summary"]["d:my-dist@none"] == [ - { - "count": 10, - "min": 0.0, - "max": 9.0, - "sum": 45.0, - "tags": { - "environment": "not-fun-env", - "release": "fun-release@1.0.0", - "transaction": "/foo", - }, - } - ] - - assert t["spans"][0]["tags"] == {"a": "b"} - (timer,) = t["spans"][0]["_metrics_summary"]["d:my-timer-metric@second"] - assert timer["count"] == 1 - assert timer["max"] == timer["min"] == timer["sum"] - assert timer["sum"] > 0 - assert timer["tags"] == { - "a": "b", - "environment": "not-fun-env", - "release": "fun-release@1.0.0", - "transaction": "/foo", - } - - -@minimum_python_37_with_gevent -@pytest.mark.forked -@pytest.mark.parametrize( - "metric_name,metric_unit,expected_name", - [ - ("first-metric", "nano-second", "first-metric@nanosecond"), - ("another_metric?", "nano second", "another_metric_@nanosecond"), - ( - "metric", - "nanosecond", - "metric@nanosecond", - ), - ( - "my.amaze.metric I guess", - "nano|\nsecond", - "my.amaze.metric_I_guess@nanosecond", - ), - ("métríc", "nanöseconď", "m_tr_c@nansecon"), - ], -) -def test_metric_name_normalization( - sentry_init, - capture_envelopes, - metric_name, - metric_unit, - expected_name, - maybe_monkeypatched_threading, -): - sentry_init( - _experiments={"enable_metrics": True, "metric_code_locations": False}, - ) - envelopes = capture_envelopes() - - metrics.distribution(metric_name, 1.0, unit=metric_unit) - - sentry_sdk.flush() - - (envelope,) = envelopes - - assert len(envelope.items) == 1 - assert envelope.items[0].headers["type"] == "statsd" - - parsed_metrics = parse_metrics(envelope.items[0].payload.get_bytes()) - assert len(parsed_metrics) == 1 - - name = parsed_metrics[0][1] - assert name == expected_name - - -@minimum_python_37_with_gevent -@pytest.mark.forked -@pytest.mark.parametrize( - "metric_tag,expected_tag", - [ - ({"f-oo|bar": "%$foo/"}, {"f-oobar": "%$foo/"}), - ({"foo$.$.$bar": "blah{}"}, {"foo..bar": "blah{}"}), - ( - {"foö-bar": "snöwmän"}, - {"fo-bar": "snöwmän"}, - ), - ({"route": "GET /foo"}, {"route": "GET /foo"}), - ({"__bar__": "this | or , that"}, {"__bar__": "this \\u{7c} or \\u{2c} that"}), - ({"foo/": "hello!\n\r\t\\"}, {"foo/": "hello!\\n\\r\\t\\\\"}), - ], -) -def test_metric_tag_normalization( - sentry_init, - capture_envelopes, - metric_tag, - expected_tag, - maybe_monkeypatched_threading, -): - sentry_init( - _experiments={"enable_metrics": True, "metric_code_locations": False}, - ) - envelopes = capture_envelopes() - - 
metrics.distribution("a", 1.0, tags=metric_tag) - - sentry_sdk.flush() - - (envelope,) = envelopes - - assert len(envelope.items) == 1 - assert envelope.items[0].headers["type"] == "statsd" - - parsed_metrics = parse_metrics(envelope.items[0].payload.get_bytes()) - assert len(parsed_metrics) == 1 - - tags = parsed_metrics[0][4] - - expected_tag_key, expected_tag_value = expected_tag.popitem() - assert expected_tag_key in tags - assert tags[expected_tag_key] == expected_tag_value - - -@minimum_python_37_with_gevent -@pytest.mark.forked -def test_before_emit_metric( - sentry_init, capture_envelopes, maybe_monkeypatched_threading -): - def before_emit(key, value, unit, tags): - if key == "removed-metric" or value == 47 or unit == "unsupported": - return False - - tags["extra"] = "foo" - del tags["release"] - # this better be a noop! - metrics.increment("shitty-recursion") - return True - - sentry_init( - release="fun-release@1.0.0", - environment="not-fun-env", - _experiments={ - "enable_metrics": True, - "metric_code_locations": False, - "before_emit_metric": before_emit, - }, - ) - envelopes = capture_envelopes() - - metrics.increment("removed-metric", 1.0) - metrics.increment("another-removed-metric", 47) - metrics.increment("yet-another-removed-metric", 1.0, unit="unsupported") - metrics.increment("actual-metric", 1.0) - sentry_sdk.flush() - - (envelope,) = envelopes - - assert len(envelope.items) == 1 - assert envelope.items[0].headers["type"] == "statsd" - m = parse_metrics(envelope.items[0].payload.get_bytes()) - - assert len(m) == 1 - assert m[0][1] == "actual-metric@none" - assert m[0][3] == ["1.0"] - assert m[0][4] == { - "extra": "foo", - "environment": "not-fun-env", - } - - -@minimum_python_37_with_gevent -@pytest.mark.forked -def test_aggregator_flush( - sentry_init, capture_envelopes, maybe_monkeypatched_threading -): - sentry_init( - release="fun-release@1.0.0", - environment="not-fun-env", - _experiments={ - "enable_metrics": True, - }, - ) - envelopes = capture_envelopes() - - metrics.increment("a-metric", 1.0) - sentry_sdk.flush() - - assert len(envelopes) == 1 - assert sentry_sdk.get_client().metrics_aggregator.buckets == {} - - -@minimum_python_37_with_gevent -@pytest.mark.forked -def test_tag_serialization( - sentry_init, capture_envelopes, maybe_monkeypatched_threading -): - sentry_init( - release="fun-release", - environment="not-fun-env", - _experiments={"enable_metrics": True, "metric_code_locations": False}, - ) - envelopes = capture_envelopes() - - metrics.increment( - "counter", - tags={ - "no-value": None, - "an-int": 42, - "a-float": 23.0, - "a-string": "blah", - "more-than-one": [1, "zwei", "3.0", None], - }, - ) - sentry_sdk.flush() - - (envelope,) = envelopes - - assert len(envelope.items) == 1 - assert envelope.items[0].headers["type"] == "statsd" - m = parse_metrics(envelope.items[0].payload.get_bytes()) - - assert len(m) == 1 - assert m[0][4] == { - "an-int": "42", - "a-float": "23.0", - "a-string": "blah", - "more-than-one": ["1", "3.0", "zwei"], - "release": "fun-release", - "environment": "not-fun-env", - } - - -@minimum_python_37_with_gevent -@pytest.mark.forked -def test_flush_recursion_protection( - sentry_init, capture_envelopes, monkeypatch, maybe_monkeypatched_threading -): - sentry_init( - release="fun-release", - environment="not-fun-env", - _experiments={"enable_metrics": True}, - ) - envelopes = capture_envelopes() - test_client = sentry_sdk.get_client() - - real_capture_envelope = test_client.transport.capture_envelope - - def 
bad_capture_envelope(*args, **kwargs): - metrics.increment("bad-metric") - return real_capture_envelope(*args, **kwargs) - - monkeypatch.setattr(test_client.transport, "capture_envelope", bad_capture_envelope) - - metrics.increment("counter") - - # flush twice to see the inner metric - sentry_sdk.flush() - sentry_sdk.flush() - - (envelope,) = envelopes - m = parse_metrics(envelope.items[0].payload.get_bytes()) - assert len(m) == 1 - assert m[0][1] == "counter@none" - - -@minimum_python_37_with_gevent -@pytest.mark.forked -def test_flush_recursion_protection_background_flush( - sentry_init, capture_envelopes, monkeypatch, maybe_monkeypatched_threading -): - monkeypatch.setattr(metrics.MetricsAggregator, "FLUSHER_SLEEP_TIME", 0.01) - sentry_init( - release="fun-release", - environment="not-fun-env", - _experiments={"enable_metrics": True}, - ) - envelopes = capture_envelopes() - test_client = sentry_sdk.get_client() - - real_capture_envelope = test_client.transport.capture_envelope - - def bad_capture_envelope(*args, **kwargs): - metrics.increment("bad-metric") - return real_capture_envelope(*args, **kwargs) - - monkeypatch.setattr(test_client.transport, "capture_envelope", bad_capture_envelope) - - metrics.increment("counter") - - # flush via sleep and flag - sentry_sdk.get_client().metrics_aggregator._force_flush = True - time.sleep(0.5) - - (envelope,) = envelopes - m = parse_metrics(envelope.items[0].payload.get_bytes()) - assert len(m) == 1 - assert m[0][1] == "counter@none" diff --git a/tests/test_transport.py b/tests/test_transport.py index d4522de942..6bf336e458 100644 --- a/tests/test_transport.py +++ b/tests/test_transport.py @@ -19,7 +19,7 @@ isolation_scope, get_isolation_scope, ) -from sentry_sdk.envelope import Envelope, Item, parse_json +from sentry_sdk.envelope import Envelope, parse_json from sentry_sdk.transport import ( KEEP_ALIVE_SOCKET_OPTIONS, _parse_rate_limits, @@ -536,117 +536,6 @@ def test_complex_limits_without_data_category( assert len(capturing_server.captured) == 0 -@pytest.mark.parametrize("response_code", [200, 429]) -def test_metric_bucket_limits(capturing_server, response_code, make_client): - client = make_client() - capturing_server.respond_with( - code=response_code, - headers={ - "X-Sentry-Rate-Limits": "4711:metric_bucket:organization:quota_exceeded:custom" - }, - ) - - envelope = Envelope() - envelope.add_item(Item(payload=b"{}", type="statsd")) - client.transport.capture_envelope(envelope) - client.flush() - - assert len(capturing_server.captured) == 1 - assert capturing_server.captured[0].path == "/api/132/envelope/" - capturing_server.clear_captured() - - assert set(client.transport._disabled_until) == set(["metric_bucket"]) - - client.transport.capture_envelope(envelope) - client.capture_event({"type": "transaction"}) - client.flush() - - assert len(capturing_server.captured) == 2 - - envelope = capturing_server.captured[0].envelope - assert envelope.items[0].type == "transaction" - envelope = capturing_server.captured[1].envelope - assert envelope.items[0].type == "client_report" - report = parse_json(envelope.items[0].get_bytes()) - assert report["discarded_events"] == [ - {"category": "metric_bucket", "reason": "ratelimit_backoff", "quantity": 1}, - ] - - -@pytest.mark.parametrize("response_code", [200, 429]) -def test_metric_bucket_limits_with_namespace( - capturing_server, response_code, make_client -): - client = make_client() - capturing_server.respond_with( - code=response_code, - headers={ - "X-Sentry-Rate-Limits": 
"4711:metric_bucket:organization:quota_exceeded:foo" - }, - ) - - envelope = Envelope() - envelope.add_item(Item(payload=b"{}", type="statsd")) - client.transport.capture_envelope(envelope) - client.flush() - - assert len(capturing_server.captured) == 1 - assert capturing_server.captured[0].path == "/api/132/envelope/" - capturing_server.clear_captured() - - assert set(client.transport._disabled_until) == set([]) - - client.transport.capture_envelope(envelope) - client.capture_event({"type": "transaction"}) - client.flush() - - assert len(capturing_server.captured) == 2 - - envelope = capturing_server.captured[0].envelope - assert envelope.items[0].type == "statsd" - envelope = capturing_server.captured[1].envelope - assert envelope.items[0].type == "transaction" - - -@pytest.mark.parametrize("response_code", [200, 429]) -def test_metric_bucket_limits_with_all_namespaces( - capturing_server, response_code, make_client -): - client = make_client() - capturing_server.respond_with( - code=response_code, - headers={ - "X-Sentry-Rate-Limits": "4711:metric_bucket:organization:quota_exceeded" - }, - ) - - envelope = Envelope() - envelope.add_item(Item(payload=b"{}", type="statsd")) - client.transport.capture_envelope(envelope) - client.flush() - - assert len(capturing_server.captured) == 1 - assert capturing_server.captured[0].path == "/api/132/envelope/" - capturing_server.clear_captured() - - assert set(client.transport._disabled_until) == set(["metric_bucket"]) - - client.transport.capture_envelope(envelope) - client.capture_event({"type": "transaction"}) - client.flush() - - assert len(capturing_server.captured) == 2 - - envelope = capturing_server.captured[0].envelope - assert envelope.items[0].type == "transaction" - envelope = capturing_server.captured[1].envelope - assert envelope.items[0].type == "client_report" - report = parse_json(envelope.items[0].get_bytes()) - assert report["discarded_events"] == [ - {"category": "metric_bucket", "reason": "ratelimit_backoff", "quantity": 1}, - ] - - @pytest.mark.parametrize("quantity", (1, 2, 10)) def test_record_lost_event_quantity(capturing_server, make_client, quantity): client = make_client() From 682b65288d78701038d13fb502ba7158343efeed Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Wed, 11 Sep 2024 17:06:58 +0200 Subject: [PATCH 039/244] Store Baggage object on PropagationContext instead of DSC hash (#3523) --- sentry_sdk/scope.py | 35 +++++++++++++---------------------- sentry_sdk/tracing_utils.py | 20 ++++++++++++-------- 2 files changed, 25 insertions(+), 30 deletions(-) diff --git a/sentry_sdk/scope.py b/sentry_sdk/scope.py index f9ed9e4474..75a39ab9ba 100644 --- a/sentry_sdk/scope.py +++ b/sentry_sdk/scope.py @@ -476,18 +476,12 @@ def get_dynamic_sampling_context(self): # type: () -> Optional[Dict[str, str]] """ Returns the Dynamic Sampling Context from the Propagation Context. - If not existing, creates a new one. 
""" - if self._propagation_context is None: - return None - - baggage = self.get_baggage() - if baggage is not None: - self._propagation_context.dynamic_sampling_context = ( - baggage.dynamic_sampling_context() - ) - - return self._propagation_context.dynamic_sampling_context + return ( + self._propagation_context.dynamic_sampling_context + if self._propagation_context + else None + ) def get_traceparent(self, *args, **kwargs): # type: (Any, Any) -> Optional[str] @@ -517,6 +511,7 @@ def get_baggage(self, *args, **kwargs): """ Returns the Sentry "baggage" header containing trace information from the currently active span or the scopes Propagation Context. + If not existing, creates a new one. """ client = self.get_client() @@ -525,14 +520,11 @@ def get_baggage(self, *args, **kwargs): return self.span.to_baggage() # If this scope has a propagation context, return baggage from there + # populate a fresh one if it doesn't exist if self._propagation_context is not None: - dynamic_sampling_context = ( - self._propagation_context.dynamic_sampling_context - ) - if dynamic_sampling_context is None: - return Baggage.from_options(self) - else: - return Baggage(dynamic_sampling_context) + if self._propagation_context.baggage is None: + self._propagation_context.baggage = Baggage.from_options(self) + return self._propagation_context.baggage # Fall back to isolation scope's baggage. It always has one return self.get_isolation_scope().get_baggage() @@ -594,10 +586,9 @@ def iter_headers(self): if traceparent is not None: yield SENTRY_TRACE_HEADER_NAME, traceparent - dsc = self.get_dynamic_sampling_context() - if dsc is not None: - baggage = Baggage(dsc).serialize() - yield BAGGAGE_HEADER_NAME, baggage + baggage = self.get_baggage() + if baggage is not None: + yield BAGGAGE_HEADER_NAME, baggage.serialize() def iter_trace_propagation_headers(self, *args, **kwargs): # type: (Any, Any) -> Generator[Tuple[str, str], None, None] diff --git a/sentry_sdk/tracing_utils.py b/sentry_sdk/tracing_utils.py index 8af17559e9..a221ee140e 100644 --- a/sentry_sdk/tracing_utils.py +++ b/sentry_sdk/tracing_utils.py @@ -337,7 +337,7 @@ class PropagationContext: "_span_id", "parent_span_id", "parent_sampled", - "dynamic_sampling_context", + "baggage", ) def __init__( @@ -346,7 +346,7 @@ def __init__( span_id=None, # type: Optional[str] parent_span_id=None, # type: Optional[str] parent_sampled=None, # type: Optional[bool] - dynamic_sampling_context=None, # type: Optional[Dict[str, str]] + baggage=None, # type: Optional[Baggage] ): # type: (...) 
-> None self._trace_id = trace_id @@ -364,8 +364,13 @@ def __init__( Important when the parent span originated in an upstream service, because we watn to sample the whole trace, or nothing from the trace.""" - self.dynamic_sampling_context = dynamic_sampling_context - """Data that is used for dynamic sampling decisions.""" + self.baggage = baggage + """Baggage object used for dynamic sampling decisions.""" + + @property + def dynamic_sampling_context(self): + # type: () -> Optional[Dict[str, str]] + return self.baggage.dynamic_sampling_context() if self.baggage else None @classmethod def from_incoming_data(cls, incoming_data): @@ -376,9 +381,7 @@ def from_incoming_data(cls, incoming_data): baggage_header = normalized_data.get(BAGGAGE_HEADER_NAME) if baggage_header: propagation_context = PropagationContext() - propagation_context.dynamic_sampling_context = Baggage.from_incoming_header( - baggage_header - ).dynamic_sampling_context() + propagation_context.baggage = Baggage.from_incoming_header(baggage_header) sentry_trace_header = normalized_data.get(SENTRY_TRACE_HEADER_NAME) if sentry_trace_header: @@ -431,11 +434,12 @@ def update(self, other_dict): def __repr__(self): # type: (...) -> str - return "".format( + return "".format( self._trace_id, self._span_id, self.parent_span_id, self.parent_sampled, + self.baggage, self.dynamic_sampling_context, ) From b27a43f2f5ab102c7f09e4cd364d35b51f6c1e89 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Wed, 11 Sep 2024 17:23:49 +0200 Subject: [PATCH 040/244] Add propagation context change to migration guide --- MIGRATION_GUIDE.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index 2be1ad892b..2a20350121 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -32,7 +32,8 @@ Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of wh - Utility function `is_auto_session_tracking_enabled_scope()` has been removed. There is no public replacement. There is a private `_is_auto_session_tracking_enabled()` (if you absolutely need this function) - Setting `scope.level` has been removed. Use `scope.set_level` instead. - `span.containing_transaction` has been removed. Use `span.root_span` instead. -- `continue_from_headers`, `continue_from_environ` and `from_traceparent` have been removed, please use top-level API `sentry_sdk.continue_trace` instead.` +- `continue_from_headers`, `continue_from_environ` and `from_traceparent` have been removed, please use top-level API `sentry_sdk.continue_trace` instead. +- `PropagationContext` constructor no longer takes a `dynamic_sampling_context` but takes a `baggage` object instead. 
### Deprecated From a4bc9decd63f468b0b7614001e7a3d3fc5b8b530 Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Thu, 12 Sep 2024 08:30:43 +0200 Subject: [PATCH 041/244] Use POTel `use_scope`, `use_isolation_scope` (#3522) * use potelscope * Use POTel use_{isolation_}scope * use from top level api --- sentry_sdk/api.py | 4 ++++ sentry_sdk/integrations/threading.py | 5 ++--- sentry_sdk/integrations/wsgi.py | 5 ++--- 3 files changed, 8 insertions(+), 6 deletions(-) diff --git a/sentry_sdk/api.py b/sentry_sdk/api.py index db0ce275fd..7d06abf660 100644 --- a/sentry_sdk/api.py +++ b/sentry_sdk/api.py @@ -11,6 +11,8 @@ PotelScope as Scope, new_scope, isolation_scope, + use_scope, + use_isolation_scope, ) @@ -77,6 +79,8 @@ "start_transaction", "trace", "monitor", + "use_scope", + "use_isolation_scope", ] diff --git a/sentry_sdk/integrations/threading.py b/sentry_sdk/integrations/threading.py index c729e208a5..a0444892fb 100644 --- a/sentry_sdk/integrations/threading.py +++ b/sentry_sdk/integrations/threading.py @@ -4,7 +4,6 @@ import sentry_sdk from sentry_sdk.integrations import Integration -from sentry_sdk.scope import use_isolation_scope, use_scope from sentry_sdk.utils import ( ensure_integration_enabled, event_from_exception, @@ -95,8 +94,8 @@ def _run_old_run_func(): reraise(*_capture_exception()) if isolation_scope_to_use is not None and current_scope_to_use is not None: - with use_isolation_scope(isolation_scope_to_use): - with use_scope(current_scope_to_use): + with sentry_sdk.use_isolation_scope(isolation_scope_to_use): + with sentry_sdk.use_scope(current_scope_to_use): return _run_old_run_func() else: return _run_old_run_func() diff --git a/sentry_sdk/integrations/wsgi.py b/sentry_sdk/integrations/wsgi.py index bfd303235e..15c72ede8a 100644 --- a/sentry_sdk/integrations/wsgi.py +++ b/sentry_sdk/integrations/wsgi.py @@ -7,7 +7,6 @@ from sentry_sdk.scope import should_send_default_pii from sentry_sdk.integrations._wsgi_common import _filter_headers from sentry_sdk.sessions import track_session -from sentry_sdk.scope import use_isolation_scope from sentry_sdk.tracing import Transaction, TRANSACTION_SOURCE_ROUTE from sentry_sdk.utils import ( ContextVar, @@ -219,7 +218,7 @@ def __iter__(self): iterator = iter(self._response) while True: - with use_isolation_scope(self._scope): + with sentry_sdk.use_isolation_scope(self._scope): try: chunk = next(iterator) except StopIteration: @@ -231,7 +230,7 @@ def __iter__(self): def close(self): # type: () -> None - with use_isolation_scope(self._scope): + with sentry_sdk.use_isolation_scope(self._scope): try: self._response.close() # type: ignore except AttributeError: From 13441e37ee76ad5a9c32fb9a5a140f17f7961452 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Thu, 12 Sep 2024 13:03:27 +0200 Subject: [PATCH 042/244] Make sure op/name/description are set correctly on transactions/spans (#3519) Make sure that `op`, `name`, `description` are set correctly on `Span`s and `Transaction`s. 
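For illustration, a minimal sketch of the intended behaviour, mirroring the compat test added
in this patch (it assumes the SDK has already been initialized with tracing enabled; the
attribute names match the assertions in tests/integrations/opentelemetry/test_compat.py):

    import sentry_sdk

    # `name` becomes the OTel span name; `op` is stored as the
    # `sentry.op` attribute on the transaction's span.
    with sentry_sdk.start_transaction(name="trx-name", op="trx-op"):
        # Child spans without a `name` fall back to `description` for the
        # OTel span name and keep it as the `sentry.description` attribute.
        # Passing `description` now emits a DeprecationWarning recommending
        # `name` instead.
        with sentry_sdk.start_span(op="span-op", description="span-desc"):
            ...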
--- .../integrations/opentelemetry/scope.py | 9 +++ .../integrations/opentelemetry/utils.py | 3 +- sentry_sdk/tracing.py | 2 +- .../integrations/opentelemetry/test_compat.py | 56 +++++++++++++++++++ 4 files changed, 68 insertions(+), 2 deletions(-) create mode 100644 tests/integrations/opentelemetry/test_compat.py diff --git a/sentry_sdk/integrations/opentelemetry/scope.py b/sentry_sdk/integrations/opentelemetry/scope.py index 40eb47c01f..a3eb1f3268 100644 --- a/sentry_sdk/integrations/opentelemetry/scope.py +++ b/sentry_sdk/integrations/opentelemetry/scope.py @@ -1,3 +1,5 @@ +import warnings + from typing import cast from contextlib import contextmanager @@ -115,6 +117,13 @@ def start_transaction(self, custom_sampling_context=None, **kwargs): def start_span(self, custom_sampling_context=None, **kwargs): # type: (Optional[SamplingContext], Any) -> POTelSpan + if kwargs.get("description") is not None: + warnings.warn( + "The `description` parameter is deprecated. Please use `name` instead.", + DeprecationWarning, + stacklevel=2, + ) + return POTelSpan(**kwargs, scope=self) diff --git a/sentry_sdk/integrations/opentelemetry/utils.py b/sentry_sdk/integrations/opentelemetry/utils.py index afa42ea772..2d16a0835d 100644 --- a/sentry_sdk/integrations/opentelemetry/utils.py +++ b/sentry_sdk/integrations/opentelemetry/utils.py @@ -96,8 +96,9 @@ def extract_span_data(span): if span.attributes is None: return (op, description, status, http_status, origin) - origin = span.attributes.get(SentrySpanAttribute.ORIGIN) + op = span.attributes.get(SentrySpanAttribute.OP) or op description = span.attributes.get(SentrySpanAttribute.DESCRIPTION) or description + origin = span.attributes.get(SentrySpanAttribute.ORIGIN) http_method = span.attributes.get(SpanAttributes.HTTP_METHOD) http_method = cast("Optional[str]", http_method) diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index bbe7fec1cb..fec1eb4a59 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -1221,7 +1221,7 @@ def __init__( start_timestamp = convert_to_otel_timestamp(start_timestamp) self._otel_span = tracer.start_span( - description or op or "", start_time=start_timestamp + name or description or op or "", start_time=start_timestamp ) self.origin = origin or DEFAULT_SPAN_ORIGIN diff --git a/tests/integrations/opentelemetry/test_compat.py b/tests/integrations/opentelemetry/test_compat.py new file mode 100644 index 0000000000..ece08ec900 --- /dev/null +++ b/tests/integrations/opentelemetry/test_compat.py @@ -0,0 +1,56 @@ +import sentry_sdk + + +def test_transaction_name_span_description_compat( + sentry_init, + capture_events, +): + sentry_init(traces_sample_rate=1.0) + + events = capture_events() + + with sentry_sdk.start_transaction( + name="trx-name", + op="trx-op", + ) as trx: + with sentry_sdk.start_span( + description="span-desc", + op="span-op", + ) as spn: + ... 
+ + assert trx.__class__.__name__ == "POTelSpan" + assert trx.op == "trx-op" + assert trx.name == "trx-name" + assert trx.description is None + + assert trx._otel_span is not None + assert trx._otel_span.name == "trx-name" + assert trx._otel_span.attributes["sentry.op"] == "trx-op" + assert trx._otel_span.attributes["sentry.name"] == "trx-name" + assert "sentry.description" not in trx._otel_span.attributes + + assert spn.__class__.__name__ == "POTelSpan" + assert spn.op == "span-op" + assert spn.description == "span-desc" + assert spn.name is None + + assert spn._otel_span is not None + assert spn._otel_span.name == "span-desc" + assert spn._otel_span.attributes["sentry.op"] == "span-op" + assert spn._otel_span.attributes["sentry.description"] == "span-desc" + assert "sentry.name" not in spn._otel_span.attributes + + transaction = events[0] + assert transaction["transaction"] == "trx-name" + assert transaction["contexts"]["trace"]["op"] == "trx-op" + assert transaction["contexts"]["trace"]["data"]["sentry.op"] == "trx-op" + assert transaction["contexts"]["trace"]["data"]["sentry.name"] == "trx-name" + assert "sentry.description" not in transaction["contexts"]["trace"]["data"] + + span = transaction["spans"][0] + assert span["description"] == "span-desc" + assert span["op"] == "span-op" + assert span["data"]["sentry.op"] == "span-op" + assert span["data"]["sentry.description"] == "span-desc" + assert "sentry.name" not in span["data"] From d1de481c88fa0daab6a6d99eb8d896824092eff1 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Tue, 24 Sep 2024 13:53:26 +0200 Subject: [PATCH 043/244] Make span.finish work (#3559) --- sentry_sdk/tracing.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index fec1eb4a59..775bdfaa7e 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -1268,7 +1268,7 @@ def __enter__(self): def __exit__(self, ty, value, tb): # type: (Optional[Any], Optional[Any], Optional[Any]) -> None - self._otel_span.end() + self.finish() # XXX set status to error if unset and an exception occurred? 
context.detach(self._ctx_token) @@ -1524,9 +1524,16 @@ def is_success(self): # type: () -> bool return self._otel_span.status.code == StatusCode.OK - def finish(self, scope=None, end_timestamp=None): - # type: (Optional[sentry_sdk.Scope], Optional[Union[float, datetime]]) -> Optional[str] - pass + def finish(self, end_timestamp=None): + # type: (Optional[Union[float, datetime]]) -> Optional[str] + if end_timestamp is not None: + from sentry_sdk.integrations.opentelemetry.utils import ( + convert_to_otel_timestamp, + ) + + self._otel_span.end(convert_to_otel_timestamp(end_timestamp)) + else: + self._otel_span.end() def to_json(self): # type: () -> dict[str, Any] From 0d60d45d47cfcf632d0703441c937a737b00fa05 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Thu, 26 Sep 2024 17:01:42 +0200 Subject: [PATCH 044/244] Make DSC work for outgoing traces (#3566) * Add parsed baggage sentry items as items on the `sampling_context.trace_state` in `continue_trace` * Fix sampler to propagate the `trace_state` to children in both sampled and dropped cases * make `iter_headers` work * make `get_trace_context` work * add `dynamic_sampling_context` in `trace_context` so that it can be picked up by the client and added in the envelope header --- sentry_sdk/_types.py | 2 + .../opentelemetry/potel_span_processor.py | 23 ++--- .../integrations/opentelemetry/sampler.py | 37 ++++---- .../integrations/opentelemetry/scope.py | 13 +-- .../integrations/opentelemetry/utils.py | 84 +++++++++++++++++-- sentry_sdk/tracing.py | 23 ++++- 6 files changed, 130 insertions(+), 52 deletions(-) diff --git a/sentry_sdk/_types.py b/sentry_sdk/_types.py index 0ee3921862..19446f0f86 100644 --- a/sentry_sdk/_types.py +++ b/sentry_sdk/_types.py @@ -197,3 +197,5 @@ class SDKInfo(TypedDict): ) HttpStatusCodeRange = Union[int, Container[int]] + + OtelExtractedSpanData = tuple[str, str, Optional[str], Optional[int], Optional[str]] diff --git a/sentry_sdk/integrations/opentelemetry/potel_span_processor.py b/sentry_sdk/integrations/opentelemetry/potel_span_processor.py index d61b5f8782..63a9acb9db 100644 --- a/sentry_sdk/integrations/opentelemetry/potel_span_processor.py +++ b/sentry_sdk/integrations/opentelemetry/potel_span_processor.py @@ -18,6 +18,7 @@ convert_from_otel_timestamp, extract_span_attributes, extract_span_data, + get_trace_context, ) from sentry_sdk.integrations.opentelemetry.consts import ( OTEL_SENTRY_CONTEXT, @@ -136,26 +137,12 @@ def _root_span_to_transaction_event(self, span): if event is None: return None - trace_id = format_trace_id(span.context.trace_id) - span_id = format_span_id(span.context.span_id) - parent_span_id = format_span_id(span.parent.span_id) if span.parent else None - - (op, description, status, _, origin) = extract_span_data(span) - - trace_context = { - "trace_id": trace_id, - "span_id": span_id, - "origin": origin or DEFAULT_SPAN_ORIGIN, - "op": op, - "status": status, - } # type: dict[str, Any] - - if parent_span_id: - trace_context["parent_span_id"] = parent_span_id - if span.attributes: - trace_context["data"] = dict(span.attributes) + span_data = extract_span_data(span) + (_, description, _, _, _) = span_data + trace_context = get_trace_context(span, span_data=span_data) contexts = {"trace": trace_context} + if span.resource.attributes: contexts[OTEL_SENTRY_CONTEXT] = {"resource": dict(span.resource.attributes)} diff --git a/sentry_sdk/integrations/opentelemetry/sampler.py b/sentry_sdk/integrations/opentelemetry/sampler.py index 445c2edd02..5fa7c9e1e8 100644 --- 
a/sentry_sdk/integrations/opentelemetry/sampler.py +++ b/sentry_sdk/integrations/opentelemetry/sampler.py @@ -41,16 +41,21 @@ def get_parent_sampled(parent_context, trace_id): return None -def dropped(parent_context=None): - # type: (Optional[SpanContext]) -> SamplingResult - trace_state = parent_context.trace_state if parent_context is not None else None - updated_trace_context = trace_state or TraceState() - updated_trace_context = updated_trace_context.update( - SENTRY_TRACE_STATE_DROPPED, "true" - ) +def dropped_result(span_context): + # type: (SpanContext) -> SamplingResult + trace_state = span_context.trace_state.update(SENTRY_TRACE_STATE_DROPPED, "true") + return SamplingResult( Decision.DROP, - trace_state=updated_trace_context, + trace_state=trace_state, + ) + + +def sampled_result(span_context): + # type: (SpanContext) -> SamplingResult + return SamplingResult( + Decision.RECORD_AND_SAMPLE, + trace_state=span_context.trace_state, ) @@ -68,12 +73,11 @@ def should_sample( # type: (...) -> SamplingResult client = sentry_sdk.get_client() - parent_span = trace.get_current_span(parent_context) - parent_context = parent_span.get_span_context() if parent_span else None + parent_span_context = trace.get_current_span(parent_context).get_span_context() # No tracing enabled, thus no sampling if not has_tracing_enabled(client.options): - return dropped(parent_context) + return dropped_result(parent_span_context) sample_rate = None @@ -89,14 +93,14 @@ def should_sample( "transaction_context": { "name": name, }, - "parent_sampled": get_parent_sampled(parent_context, trace_id), + "parent_sampled": get_parent_sampled(parent_span_context, trace_id), } sample_rate = client.options["traces_sampler"](sampling_context) else: # Check if there is a parent with a sampling decision - parent_sampled = get_parent_sampled(parent_context, trace_id) + parent_sampled = get_parent_sampled(parent_span_context, trace_id) if parent_sampled is not None: sample_rate = parent_sampled else: @@ -108,15 +112,16 @@ def should_sample( logger.warning( f"[Tracing] Discarding {name} because of invalid sample rate." 
) - return dropped(parent_context) + return dropped_result(parent_span_context) # Roll the dice on sample rate sampled = random() < float(sample_rate) + # TODO-neel-potel set sample rate as attribute for DSC if sampled: - return SamplingResult(Decision.RECORD_AND_SAMPLE) + return sampled_result(parent_span_context) else: - return dropped(parent_context) + return dropped_result(parent_span_context) def get_description(self) -> str: return self.__class__.__name__ diff --git a/sentry_sdk/integrations/opentelemetry/scope.py b/sentry_sdk/integrations/opentelemetry/scope.py index a3eb1f3268..8d8c977b35 100644 --- a/sentry_sdk/integrations/opentelemetry/scope.py +++ b/sentry_sdk/integrations/opentelemetry/scope.py @@ -4,7 +4,7 @@ from contextlib import contextmanager from opentelemetry.context import get_value, set_value, attach, detach, get_current -from opentelemetry.trace import SpanContext, NonRecordingSpan, TraceFlags, use_span +from opentelemetry.trace import SpanContext, NonRecordingSpan, TraceFlags, TraceState, use_span from sentry_sdk.integrations.opentelemetry.consts import ( SENTRY_SCOPES_KEY, @@ -12,6 +12,7 @@ SENTRY_USE_CURRENT_SCOPE_KEY, SENTRY_USE_ISOLATION_SCOPE_KEY, ) +from sentry_sdk.integrations.opentelemetry.utils import trace_state_from_baggage from sentry_sdk.scope import Scope, ScopeType from sentry_sdk.tracing import POTelSpan from sentry_sdk._types import TYPE_CHECKING @@ -93,15 +94,17 @@ def _incoming_otel_span_context(self): else TraceFlags.DEFAULT ) - # TODO-neel-potel tracestate + # TODO-neel-potel do we need parent and sampled like JS? + trace_state = None + if self._propagation_context.baggage: + trace_state = trace_state_from_baggage(self._propagation_context.baggage) + span_context = SpanContext( trace_id=int(self._propagation_context.trace_id, 16), # type: ignore span_id=int(self._propagation_context.parent_span_id, 16), # type: ignore is_remote=True, trace_flags=trace_flags, - # TODO-anton: add trace_state (mapping[str,str]) with the parentSpanId, dsc and sampled from self._propagation_context - # trace_state={ - # } + trace_state=trace_state, ) return span_context diff --git a/sentry_sdk/integrations/opentelemetry/utils.py b/sentry_sdk/integrations/opentelemetry/utils.py index 2d16a0835d..571389fb60 100644 --- a/sentry_sdk/integrations/opentelemetry/utils.py +++ b/sentry_sdk/integrations/opentelemetry/utils.py @@ -1,20 +1,24 @@ +import re from typing import cast from datetime import datetime, timezone -from opentelemetry.trace import SpanKind, StatusCode +from urllib3.util import parse_url as urlparse +from urllib.parse import quote +from opentelemetry.trace import Span, SpanKind, StatusCode, format_trace_id, format_span_id, TraceState from opentelemetry.semconv.trace import SpanAttributes from opentelemetry.sdk.trace import ReadableSpan -from sentry_sdk.consts import SPANSTATUS -from sentry_sdk.tracing import get_span_status_from_http_code -from sentry_sdk.integrations.opentelemetry.consts import SentrySpanAttribute -from urllib3.util import parse_url as urlparse from sentry_sdk.utils import Dsn +from sentry_sdk.consts import SPANSTATUS +from sentry_sdk.tracing import get_span_status_from_http_code, DEFAULT_SPAN_ORIGIN +from sentry_sdk.tracing_utils import Baggage +from sentry_sdk.integrations.opentelemetry.consts import SentrySpanAttribute from sentry_sdk._types import TYPE_CHECKING if TYPE_CHECKING: - from typing import Any, Optional, Mapping, Sequence, Union + from typing import Any, Optional, Mapping, Sequence, Union, ItemsView + from sentry_sdk._types 
import OtelExtractedSpanData GRPC_ERROR_MAP = { @@ -87,7 +91,7 @@ def convert_to_otel_timestamp(time): def extract_span_data(span): - # type: (ReadableSpan) -> tuple[str, str, Optional[str], Optional[int], Optional[str]] + # type: (ReadableSpan) -> OtelExtractedSpanData op = span.name description = span.name status, http_status = extract_span_status(span) @@ -125,7 +129,7 @@ def extract_span_data(span): def span_data_for_http_method(span): - # type: (ReadableSpan) -> tuple[str, str, Optional[str], Optional[int], Optional[str]] + # type: (ReadableSpan) -> OtelExtractedSpanData span_attributes = span.attributes or {} op = "http" @@ -167,7 +171,7 @@ def span_data_for_http_method(span): def span_data_for_db_query(span): - # type: (ReadableSpan) -> tuple[str, str, Optional[str], Optional[int], Optional[str]] + # type: (ReadableSpan) -> OtelExtractedSpanData span_attributes = span.attributes or {} op = "db" @@ -261,3 +265,65 @@ def extract_span_attributes(span, namespace): extracted_attrs[key] = value return extracted_attrs + + +def get_trace_context(span, span_data=None): + # type: (ReadableSpan, Optional[OtelExtractedSpanData]) -> dict[str, Any] + if not span.context: + return {} + + trace_id = format_trace_id(span.context.trace_id) + span_id = format_span_id(span.context.span_id) + parent_span_id = format_span_id(span.parent.span_id) if span.parent else None + + if span_data is None: + span_data = extract_span_data(span) + + (op, _, status, _, origin) = span_data + + trace_context = { + "trace_id": trace_id, + "span_id": span_id, + "parent_span_id": parent_span_id, + "op": op, + "origin": origin or DEFAULT_SPAN_ORIGIN, + "status": status, + } # type: dict[str, Any] + + if span.attributes: + trace_context["data"] = dict(span.attributes) + + trace_context["dynamic_sampling_context"] = dsc_from_trace_state(span.context.trace_state) + + # TODO-neel-potel profiler thread_id, thread_name + + return trace_context + + +def trace_state_from_baggage(baggage): + # type: (Baggage) -> TraceState + items = [] + for k, v in baggage.sentry_items.items(): + key = Baggage.SENTRY_PREFIX + quote(k) + val = quote(str(v)) + items.append((key, val)) + return TraceState(items) + + +def serialize_trace_state(trace_state): + # type: (TraceState) -> str + sentry_items = [] + for k, v in trace_state.items(): + if Baggage.SENTRY_PREFIX_REGEX.match(k): + sentry_items.append((k, v)) + return ",".join(key + "=" + value for key, value in sentry_items) + + +def dsc_from_trace_state(trace_state): + # type: (TraceState) -> dict[str, str] + dsc = {} + for k, v in trace_state.items(): + if Baggage.SENTRY_PREFIX_REGEX.match(k): + key = re.sub(Baggage.SENTRY_PREFIX_REGEX, "", k) + dsc[key] = v + return dsc diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 3c76ef7a00..d085fa81b5 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -1,7 +1,7 @@ import uuid import random import time -import warnings +import warnings from datetime import datetime, timedelta, timezone from opentelemetry import trace as otel_trace, context @@ -1447,7 +1447,14 @@ def start_child(self, **kwargs): def iter_headers(self): # type: () -> Iterator[Tuple[str, str]] - pass + yield SENTRY_TRACE_HEADER_NAME, self.to_traceparent() + + from sentry_sdk.integrations.opentelemetry.utils import ( + serialize_trace_state, + ) + + trace_state = self._otel_span.get_span_context().trace_state + yield BAGGAGE_HEADER_NAME, serialize_trace_state(trace_state) def to_traceparent(self): # type: () -> str @@ -1466,6 +1473,7 @@ def to_traceparent(self): 
def to_baggage(self): # type: () -> Optional[Baggage] + # TODO-neel-potel head SDK populate baggage mess pass def set_tag(self, key, value): @@ -1540,8 +1548,15 @@ def to_json(self): pass def get_trace_context(self): - # type: () -> Any - pass + # type: () -> dict[str, Any] + if not isinstance(self._otel_span, ReadableSpan): + return {} + + from sentry_sdk.integrations.opentelemetry.utils import ( + get_trace_context, + ) + + return get_trace_context(self._otel_span) def get_profile_context(self): # type: () -> Optional[ProfileContext] From ed8b45258d5083871f5af033ad7bba1ccabdf5fe Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Mon, 30 Sep 2024 10:06:28 +0200 Subject: [PATCH 045/244] Fix PropagationContext test (#3570) --- tests/test_propagationcontext.py | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/tests/test_propagationcontext.py b/tests/test_propagationcontext.py index c650071511..9479518552 100644 --- a/tests/test_propagationcontext.py +++ b/tests/test_propagationcontext.py @@ -1,4 +1,4 @@ -from sentry_sdk.tracing_utils import PropagationContext +from sentry_sdk.tracing_utils import Baggage, PropagationContext def test_empty_context(): @@ -16,23 +16,26 @@ def test_empty_context(): def test_context_with_values(): + baggage = Baggage( + sentry_items={ + "sentry-trace": "1234567890abcdef1234567890abcdef-1234567890abcdef-1" + }, + third_party_items={"foo": "bar"}, + mutable=False, + ) ctx = PropagationContext( trace_id="1234567890abcdef1234567890abcdef", span_id="1234567890abcdef", parent_span_id="abcdef1234567890", parent_sampled=True, - dynamic_sampling_context={ - "foo": "bar", - }, + baggage=baggage, ) assert ctx.trace_id == "1234567890abcdef1234567890abcdef" assert ctx.span_id == "1234567890abcdef" assert ctx.parent_span_id == "abcdef1234567890" assert ctx.parent_sampled - assert ctx.dynamic_sampling_context == { - "foo": "bar", - } + assert ctx.baggage == baggage def test_lacy_uuids(): From 3bf8b13fd87a677909481eecb1af64faaff26376 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Mon, 30 Sep 2024 10:07:16 +0200 Subject: [PATCH 046/244] Use freezegun to make spans of a certain duration (#3571) --- requirements-testing.txt | 1 + tests/integrations/asyncpg/test_asyncpg.py | 27 ++++++++++++------- .../integrations/django/test_db_query_data.py | 21 +++++++++------ .../sqlalchemy/test_sqlalchemy.py | 21 +++++++++------ 4 files changed, 44 insertions(+), 26 deletions(-) diff --git a/requirements-testing.txt b/requirements-testing.txt index 95c015f806..29185631cd 100644 --- a/requirements-testing.txt +++ b/requirements-testing.txt @@ -11,3 +11,4 @@ asttokens responses pysocks setuptools +freezegun \ No newline at end of file diff --git a/tests/integrations/asyncpg/test_asyncpg.py b/tests/integrations/asyncpg/test_asyncpg.py index e36d15c5d2..8996c8dd1a 100644 --- a/tests/integrations/asyncpg/test_asyncpg.py +++ b/tests/integrations/asyncpg/test_asyncpg.py @@ -26,6 +26,7 @@ import pytest import pytest_asyncio from asyncpg import connect, Connection +from freezegun import freeze_time from sentry_sdk import capture_message, start_transaction from sentry_sdk.integrations.asyncpg import AsyncPGIntegration @@ -653,11 +654,14 @@ async def test_no_query_source_if_duration_too_short(sentry_init, capture_events @contextmanager def fake_record_sql_queries(*args, **kwargs): - with record_sql_queries(*args, **kwargs) as span: - pass - span.start_timestamp = datetime.datetime(2024, 1, 1, microsecond=0) - span.timestamp = datetime.datetime(2024, 1, 1, 
microsecond=99999) - yield span + with freeze_time(datetime(2024, 1, 1, microsecond=0)): + with record_sql_queries(*args, **kwargs) as span: + freezer = freeze_time(datetime(2024, 1, 1, microsecond=99999)) + freezer.start() + + freezer.stop() + + yield span with mock.patch( "sentry_sdk.integrations.asyncpg.record_sql_queries", @@ -698,11 +702,14 @@ async def test_query_source_if_duration_over_threshold(sentry_init, capture_even @contextmanager def fake_record_sql_queries(*args, **kwargs): - with record_sql_queries(*args, **kwargs) as span: - pass - span.start_timestamp = datetime.datetime(2024, 1, 1, microsecond=0) - span.timestamp = datetime.datetime(2024, 1, 1, microsecond=100001) - yield span + with freeze_time(datetime(2024, 1, 1, microsecond=0)): + with record_sql_queries(*args, **kwargs) as span: + freezer = freeze_time(datetime(2024, 1, 1, microsecond=100001)) + freezer.start() + + freezer.stop() + + yield span with mock.patch( "sentry_sdk.integrations.asyncpg.record_sql_queries", diff --git a/tests/integrations/django/test_db_query_data.py b/tests/integrations/django/test_db_query_data.py index 41ad9d5e1c..6e49f61085 100644 --- a/tests/integrations/django/test_db_query_data.py +++ b/tests/integrations/django/test_db_query_data.py @@ -12,6 +12,7 @@ except ImportError: from django.core.urlresolvers import reverse +from freezegun import freeze_time from werkzeug.test import Client from sentry_sdk import start_transaction @@ -348,11 +349,13 @@ def test_no_query_source_if_duration_too_short(sentry_init, client, capture_even class fake_record_sql_queries: # noqa: N801 def __init__(self, *args, **kwargs): - with record_sql_queries(*args, **kwargs) as span: - self.span = span + with freeze_time(datetime(2024, 1, 1, microsecond=0)): + with record_sql_queries(*args, **kwargs) as span: + self.span = span + freezer = freeze_time(datetime(2024, 1, 1, microsecond=99999)) + freezer.start() - self.span.start_timestamp = datetime(2024, 1, 1, microsecond=0) - self.span.timestamp = datetime(2024, 1, 1, microsecond=99999) + freezer.stop() def __enter__(self): return self.span @@ -406,11 +409,13 @@ def test_query_source_if_duration_over_threshold(sentry_init, client, capture_ev class fake_record_sql_queries: # noqa: N801 def __init__(self, *args, **kwargs): - with record_sql_queries(*args, **kwargs) as span: - self.span = span + with freeze_time(datetime(2024, 1, 1, microsecond=0)): + with record_sql_queries(*args, **kwargs) as span: + self.span = span + freezer = freeze_time(datetime(2024, 1, 1, microsecond=99999)) + freezer.start() - self.span.start_timestamp = datetime(2024, 1, 1, microsecond=0) - self.span.timestamp = datetime(2024, 1, 1, microsecond=101000) + freezer.stop() def __enter__(self): return self.span diff --git a/tests/integrations/sqlalchemy/test_sqlalchemy.py b/tests/integrations/sqlalchemy/test_sqlalchemy.py index 2b95fe02d4..84657d8c8f 100644 --- a/tests/integrations/sqlalchemy/test_sqlalchemy.py +++ b/tests/integrations/sqlalchemy/test_sqlalchemy.py @@ -3,6 +3,7 @@ from unittest import mock import pytest +from freezegun import freeze_time from sqlalchemy import Column, ForeignKey, Integer, String, create_engine from sqlalchemy.exc import IntegrityError from sqlalchemy.ext.declarative import declarative_base @@ -553,11 +554,13 @@ class Person(Base): class fake_record_sql_queries: # noqa: N801 def __init__(self, *args, **kwargs): - with record_sql_queries(*args, **kwargs) as span: - self.span = span + with freeze_time(datetime(2024, 1, 1, microsecond=0)): + with 
record_sql_queries(*args, **kwargs) as span: + self.span = span + freezer = freeze_time(datetime(2024, 1, 1, microsecond=99999)) + freezer.start() - self.span.start_timestamp = datetime(2024, 1, 1, microsecond=0) - self.span.timestamp = datetime(2024, 1, 1, microsecond=99999) + freezer.stop() def __enter__(self): return self.span @@ -619,11 +622,13 @@ class Person(Base): class fake_record_sql_queries: # noqa: N801 def __init__(self, *args, **kwargs): - with record_sql_queries(*args, **kwargs) as span: - self.span = span + with freeze_time(datetime(2024, 1, 1, microsecond=0)): + with record_sql_queries(*args, **kwargs) as span: + self.span = span + freezer = freeze_time(datetime(2024, 1, 1, microsecond=99999)) + freezer.start() - self.span.start_timestamp = datetime(2024, 1, 1, microsecond=0) - self.span.timestamp = datetime(2024, 1, 1, microsecond=101000) + freezer.stop() def __enter__(self): return self.span From 3ef267c3ee524524f73e31352e03ad4ec79bb6da Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Mon, 30 Sep 2024 10:16:15 +0200 Subject: [PATCH 047/244] Removed datetime_from_timezone (#3569) --- tests/test_basics.py | 39 +++++++++++++++++----------------- tests/test_utils.py | 50 -------------------------------------------- 2 files changed, 20 insertions(+), 69 deletions(-) diff --git a/tests/test_basics.py b/tests/test_basics.py index d287a9f2f5..3cef565971 100644 --- a/tests/test_basics.py +++ b/tests/test_basics.py @@ -1,13 +1,12 @@ -import datetime import importlib import logging import os import sys import time from collections import Counter +from datetime import datetime, timedelta, timezone import pytest -from sentry_sdk.utils import datetime_from_isoformat from tests.conftest import patch_start_tracing_child import sentry_sdk @@ -329,12 +328,12 @@ def test_breadcrumbs(sentry_init, capture_events): def test_breadcrumb_ordering(sentry_init, capture_events): sentry_init() events = capture_events() - now = datetime.datetime.now(datetime.timezone.utc).replace(microsecond=0) + now = datetime.now(timezone.utc).replace(microsecond=0) timestamps = [ - now - datetime.timedelta(days=10), - now - datetime.timedelta(days=8), - now - datetime.timedelta(days=12), + now - timedelta(days=10), + now - timedelta(days=8), + now - timedelta(days=12), ] for timestamp in timestamps: @@ -350,7 +349,8 @@ def test_breadcrumb_ordering(sentry_init, capture_events): assert len(event["breadcrumbs"]["values"]) == len(timestamps) timestamps_from_event = [ - datetime_from_isoformat(x["timestamp"]) for x in event["breadcrumbs"]["values"] + datetime.fromisoformat(x["timestamp"]) + for x in event["breadcrumbs"]["values"] ] assert timestamps_from_event == sorted(timestamps) @@ -358,24 +358,24 @@ def test_breadcrumb_ordering(sentry_init, capture_events): def test_breadcrumb_ordering_different_types(sentry_init, capture_events): sentry_init() events = capture_events() - now = datetime.datetime.now(datetime.timezone.utc) + now = datetime.now(timezone.utc) timestamps = [ - now - datetime.timedelta(days=10), - now - datetime.timedelta(days=8), - now.replace(microsecond=0) - datetime.timedelta(days=12), - now - datetime.timedelta(days=9), - now - datetime.timedelta(days=13), - now.replace(microsecond=0) - datetime.timedelta(days=11), + now - timedelta(days=10), + now - timedelta(days=8), + now.replace(microsecond=0) - timedelta(days=12), + now - timedelta(days=9), + now - timedelta(days=13), + now.replace(microsecond=0) - timedelta(days=11), ] breadcrumb_timestamps = [ timestamps[0], timestamps[1].isoformat(), - 
datetime.datetime.strftime(timestamps[2], "%Y-%m-%dT%H:%M:%S") + "Z", - datetime.datetime.strftime(timestamps[3], "%Y-%m-%dT%H:%M:%S.%f") + "+00:00", - datetime.datetime.strftime(timestamps[4], "%Y-%m-%dT%H:%M:%S.%f") + "+0000", - datetime.datetime.strftime(timestamps[5], "%Y-%m-%dT%H:%M:%S.%f") + "-0000", + datetime.strftime(timestamps[2], "%Y-%m-%dT%H:%M:%S") + "Z", + datetime.strftime(timestamps[3], "%Y-%m-%dT%H:%M:%S.%f") + "+00:00", + datetime.strftime(timestamps[4], "%Y-%m-%dT%H:%M:%S.%f") + "+0000", + datetime.strftime(timestamps[5], "%Y-%m-%dT%H:%M:%S.%f") + "-0000", ] for i, timestamp in enumerate(timestamps): @@ -391,7 +391,8 @@ def test_breadcrumb_ordering_different_types(sentry_init, capture_events): assert len(event["breadcrumbs"]["values"]) == len(timestamps) timestamps_from_event = [ - datetime_from_isoformat(x["timestamp"]) for x in event["breadcrumbs"]["values"] + datetime.fromisoformat(x["timestamp"]) + for x in event["breadcrumbs"]["values"] ] assert timestamps_from_event == sorted(timestamps) diff --git a/tests/test_utils.py b/tests/test_utils.py index c46cac7f9f..4df343a357 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -12,7 +12,6 @@ from sentry_sdk.utils import ( Components, Dsn, - datetime_from_isoformat, env_to_bool, format_timestamp, get_current_thread_meta, @@ -62,55 +61,6 @@ def _normalize_distribution_name(name): return re.sub(r"[-_.]+", "-", name).lower() -@pytest.mark.parametrize( - ("input_str", "expected_output"), - ( - ( - "2021-01-01T00:00:00.000000Z", - datetime(2021, 1, 1, tzinfo=timezone.utc), - ), # UTC time - ( - "2021-01-01T00:00:00.000000", - datetime(2021, 1, 1, tzinfo=datetime.now().astimezone().tzinfo), - ), # No TZ -- assume UTC - ( - "2021-01-01T00:00:00Z", - datetime(2021, 1, 1, tzinfo=timezone.utc), - ), # UTC - No milliseconds - ( - "2021-01-01T00:00:00.000000+00:00", - datetime(2021, 1, 1, tzinfo=timezone.utc), - ), - ( - "2021-01-01T00:00:00.000000-00:00", - datetime(2021, 1, 1, tzinfo=timezone.utc), - ), - ( - "2021-01-01T00:00:00.000000+0000", - datetime(2021, 1, 1, tzinfo=timezone.utc), - ), - ( - "2021-01-01T00:00:00.000000-0000", - datetime(2021, 1, 1, tzinfo=timezone.utc), - ), - ( - "2020-12-31T00:00:00.000000+02:00", - datetime(2020, 12, 31, tzinfo=timezone(timedelta(hours=2))), - ), # UTC+2 time - ( - "2020-12-31T00:00:00.000000-0200", - datetime(2020, 12, 31, tzinfo=timezone(timedelta(hours=-2))), - ), # UTC-2 time - ( - "2020-12-31T00:00:00-0200", - datetime(2020, 12, 31, tzinfo=timezone(timedelta(hours=-2))), - ), # UTC-2 time - no milliseconds - ), -) -def test_datetime_from_isoformat(input_str, expected_output): - assert datetime_from_isoformat(input_str) == expected_output, input_str - - @pytest.mark.parametrize( "env_var_value,strict,expected", [ From e80d293190837f749de81ce33f997ee36a5d1022 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Mon, 30 Sep 2024 14:48:45 +0200 Subject: [PATCH 048/244] linting --- .github/workflows/test-integrations-graphql.yml | 2 +- .../workflows/test-integrations-miscellaneous.yml | 2 +- .../test-integrations-web-frameworks-1.yml | 2 +- sentry_sdk/integrations/opentelemetry/scope.py | 8 +++++++- sentry_sdk/integrations/opentelemetry/utils.py | 13 +++++++++++-- sentry_sdk/tracing.py | 2 +- tests/test_basics.py | 6 ++---- 7 files changed, 24 insertions(+), 11 deletions(-) diff --git a/.github/workflows/test-integrations-graphql.yml b/.github/workflows/test-integrations-graphql.yml index 7ac79befb8..3b4aa9f1b8 100644 --- a/.github/workflows/test-integrations-graphql.yml +++ 
b/.github/workflows/test-integrations-graphql.yml @@ -27,7 +27,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.7","3.8","3.11","3.12","3.13"] + python-version: ["3.7","3.8","3.12","3.13"] os: [ubuntu-latest] steps: - uses: actions/checkout@v4.2.0 diff --git a/.github/workflows/test-integrations-miscellaneous.yml b/.github/workflows/test-integrations-miscellaneous.yml index 29c037657b..143648fba3 100644 --- a/.github/workflows/test-integrations-miscellaneous.yml +++ b/.github/workflows/test-integrations-miscellaneous.yml @@ -27,7 +27,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.7","3.8","3.11","3.12","3.13"] + python-version: ["3.7","3.8","3.12","3.13"] os: [ubuntu-latest] steps: - uses: actions/checkout@v4.2.0 diff --git a/.github/workflows/test-integrations-web-frameworks-1.yml b/.github/workflows/test-integrations-web-frameworks-1.yml index e3c77e006c..651118ab24 100644 --- a/.github/workflows/test-integrations-web-frameworks-1.yml +++ b/.github/workflows/test-integrations-web-frameworks-1.yml @@ -27,7 +27,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.8","3.10","3.11","3.12","3.13"] + python-version: ["3.8","3.10","3.12","3.13"] os: [ubuntu-latest] services: postgres: diff --git a/sentry_sdk/integrations/opentelemetry/scope.py b/sentry_sdk/integrations/opentelemetry/scope.py index 8d8c977b35..fc76c4ffdb 100644 --- a/sentry_sdk/integrations/opentelemetry/scope.py +++ b/sentry_sdk/integrations/opentelemetry/scope.py @@ -4,7 +4,13 @@ from contextlib import contextmanager from opentelemetry.context import get_value, set_value, attach, detach, get_current -from opentelemetry.trace import SpanContext, NonRecordingSpan, TraceFlags, TraceState, use_span +from opentelemetry.trace import ( + SpanContext, + NonRecordingSpan, + TraceFlags, + TraceState, + use_span, +) from sentry_sdk.integrations.opentelemetry.consts import ( SENTRY_SCOPES_KEY, diff --git a/sentry_sdk/integrations/opentelemetry/utils.py b/sentry_sdk/integrations/opentelemetry/utils.py index 571389fb60..982ecb4509 100644 --- a/sentry_sdk/integrations/opentelemetry/utils.py +++ b/sentry_sdk/integrations/opentelemetry/utils.py @@ -4,7 +4,14 @@ from urllib3.util import parse_url as urlparse from urllib.parse import quote -from opentelemetry.trace import Span, SpanKind, StatusCode, format_trace_id, format_span_id, TraceState +from opentelemetry.trace import ( + Span, + SpanKind, + StatusCode, + format_trace_id, + format_span_id, + TraceState, +) from opentelemetry.semconv.trace import SpanAttributes from opentelemetry.sdk.trace import ReadableSpan @@ -293,7 +300,9 @@ def get_trace_context(span, span_data=None): if span.attributes: trace_context["data"] = dict(span.attributes) - trace_context["dynamic_sampling_context"] = dsc_from_trace_state(span.context.trace_state) + trace_context["dynamic_sampling_context"] = dsc_from_trace_state( + span.context.trace_state + ) # TODO-neel-potel profiler thread_id, thread_name diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index d085fa81b5..46c19996bd 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -1453,7 +1453,7 @@ def iter_headers(self): serialize_trace_state, ) - trace_state = self._otel_span.get_span_context().trace_state + trace_state = self._otel_span.get_span_context().trace_state yield BAGGAGE_HEADER_NAME, serialize_trace_state(trace_state) def to_traceparent(self): diff --git a/tests/test_basics.py b/tests/test_basics.py index 3cef565971..62122b3fff 100644 --- a/tests/test_basics.py +++ 
b/tests/test_basics.py @@ -349,8 +349,7 @@ def test_breadcrumb_ordering(sentry_init, capture_events): assert len(event["breadcrumbs"]["values"]) == len(timestamps) timestamps_from_event = [ - datetime.fromisoformat(x["timestamp"]) - for x in event["breadcrumbs"]["values"] + datetime.fromisoformat(x["timestamp"]) for x in event["breadcrumbs"]["values"] ] assert timestamps_from_event == sorted(timestamps) @@ -391,8 +390,7 @@ def test_breadcrumb_ordering_different_types(sentry_init, capture_events): assert len(event["breadcrumbs"]["values"]) == len(timestamps) timestamps_from_event = [ - datetime.fromisoformat(x["timestamp"]) - for x in event["breadcrumbs"]["values"] + datetime.fromisoformat(x["timestamp"]) for x in event["breadcrumbs"]["values"] ] assert timestamps_from_event == sorted(timestamps) From 362049c7006fac6824058d98512a8ca0a8eee7f3 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Tue, 1 Oct 2024 14:50:07 +0200 Subject: [PATCH 049/244] Added new depenceny to lambda layer generation. (#3586) --- requirements-aws-lambda-layer.txt | 2 ++ 1 file changed, 2 insertions(+) diff --git a/requirements-aws-lambda-layer.txt b/requirements-aws-lambda-layer.txt index 8986fdafc0..7bde8c4844 100644 --- a/requirements-aws-lambda-layer.txt +++ b/requirements-aws-lambda-layer.txt @@ -5,3 +5,5 @@ certifi # So we pin this here to make our Lambda layer work with # Lambda Function using Python 3.7+ urllib3<1.27 + +opentelemetry-distro>=0.35b0 From 29aeebb89fe367bd41be99200a3b7baa86ee3d50 Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Fri, 4 Oct 2024 09:36:51 +0200 Subject: [PATCH 050/244] Fix http_methods_to_capture (#3596) I messed up the merge. If an error happens, we should still report it, whatever the HTTP method. Fixing here. --- sentry_sdk/integrations/_wsgi_common.py | 9 ++++ sentry_sdk/integrations/asgi.py | 65 ++++++++++++++----------- sentry_sdk/integrations/wsgi.py | 35 +++++++------ tests/integrations/django/test_basic.py | 24 ++++++--- 4 files changed, 82 insertions(+), 51 deletions(-) diff --git a/sentry_sdk/integrations/_wsgi_common.py b/sentry_sdk/integrations/_wsgi_common.py index 072a102b7c..baca0f7034 100644 --- a/sentry_sdk/integrations/_wsgi_common.py +++ b/sentry_sdk/integrations/_wsgi_common.py @@ -1,4 +1,5 @@ import json +from contextlib import contextmanager from copy import deepcopy import sentry_sdk @@ -15,6 +16,7 @@ if TYPE_CHECKING: from typing import Any from typing import Dict + from typing import Iterator from typing import Mapping from typing import MutableMapping from typing import Optional @@ -50,6 +52,13 @@ ) +# This noop context manager can be replaced with "from contextlib import nullcontext" when we drop Python 3.6 support +@contextmanager +def nullcontext(): + # type: () -> Iterator[None] + yield + + def request_body_within_bounds(client, content_length): # type: (Optional[sentry_sdk.client.BaseClient], int) -> bool if client is None: diff --git a/sentry_sdk/integrations/asgi.py b/sentry_sdk/integrations/asgi.py index 8426486ce5..22e42acb6d 100644 --- a/sentry_sdk/integrations/asgi.py +++ b/sentry_sdk/integrations/asgi.py @@ -19,6 +19,7 @@ ) from sentry_sdk.integrations._wsgi_common import ( DEFAULT_HTTP_METHODS_TO_CAPTURE, + nullcontext, ) from sentry_sdk.sessions import track_session from sentry_sdk.tracing import ( @@ -190,9 +191,10 @@ async def _run_app(self, scope, receive, send, asgi_version): ) method = scope.get("method", "").upper() - if method in self.http_methods_to_capture: - with sentry_sdk.continue_trace(_get_headers(scope)): - with 
sentry_sdk.start_transaction( + should_trace = method in self.http_methods_to_capture + with sentry_sdk.continue_trace(_get_headers(scope)): + with ( + sentry_sdk.start_transaction( op=( OP.WEBSOCKET_SERVER if ty == "websocket" @@ -202,37 +204,42 @@ async def _run_app(self, scope, receive, send, asgi_version): source=transaction_source, origin=self.span_origin, custom_sampling_context={"asgi_scope": scope}, - ) as transaction: + ) + if should_trace + else nullcontext() + ) as transaction: + if transaction is not None: logger.debug( "[ASGI] Started transaction: %s", transaction ) transaction.set_tag("asgi.type", ty) - try: - - async def _sentry_wrapped_send(event): - # type: (Dict[str, Any]) -> Any - is_http_response = ( - event.get("type") == "http.response.start" - and "status" in event - ) - if is_http_response: - transaction.set_http_status(event["status"]) - - return await send(event) - - if asgi_version == 2: - return await self.app(scope)( - receive, _sentry_wrapped_send - ) - else: - return await self.app( - scope, receive, _sentry_wrapped_send - ) - except Exception as exc: - _capture_exception( - exc, mechanism_type=self.mechanism_type + try: + + async def _sentry_wrapped_send(event): + # type: (Dict[str, Any]) -> Any + is_http_response = ( + event.get("type") == "http.response.start" + and transaction is not None + and "status" in event ) - raise exc from None + if is_http_response: + transaction.set_http_status(event["status"]) + + return await send(event) + + if asgi_version == 2: + return await self.app(scope)( + receive, _sentry_wrapped_send + ) + else: + return await self.app( + scope, receive, _sentry_wrapped_send + ) + except Exception as exc: + _capture_exception( + exc, mechanism_type=self.mechanism_type + ) + raise exc from None finally: _asgi_middleware_applied.set(False) diff --git a/sentry_sdk/integrations/wsgi.py b/sentry_sdk/integrations/wsgi.py index c19483bc62..f778d7f177 100644 --- a/sentry_sdk/integrations/wsgi.py +++ b/sentry_sdk/integrations/wsgi.py @@ -8,6 +8,7 @@ from sentry_sdk.integrations._wsgi_common import ( DEFAULT_HTTP_METHODS_TO_CAPTURE, _filter_headers, + nullcontext, ) from sentry_sdk.sessions import track_session from sentry_sdk.tracing import Transaction, TRANSACTION_SOURCE_ROUTE @@ -106,27 +107,31 @@ def __call__(self, environ, start_response): ) method = environ.get("REQUEST_METHOD", "").upper() - if method in self.http_methods_to_capture: - with sentry_sdk.continue_trace(environ): - with sentry_sdk.start_transaction( + should_trace = method in self.http_methods_to_capture + with sentry_sdk.continue_trace(environ): + with ( + sentry_sdk.start_transaction( environ, op=OP.HTTP_SERVER, name="generic WSGI request", source=TRANSACTION_SOURCE_ROUTE, origin=self.span_origin, custom_sampling_context={"wsgi_environ": environ}, - ) as transaction: - try: - response = self.app( - environ, - partial( - _sentry_start_response, - start_response, - transaction, - ), - ) - except BaseException: - reraise(*_capture_exception()) + ) + if should_trace + else nullcontext() + ) as transaction: + try: + response = self.app( + environ, + partial( + _sentry_start_response, + start_response, + transaction, + ), + ) + except BaseException: + reraise(*_capture_exception()) finally: _wsgi_middleware_applied.set(False) diff --git a/tests/integrations/django/test_basic.py b/tests/integrations/django/test_basic.py index 2089f1e936..f755d8c3dd 100644 --- a/tests/integrations/django/test_basic.py +++ b/tests/integrations/django/test_basic.py @@ -1113,6 +1113,9 @@ def 
test_csrf(sentry_init, client): assert content == b"ok" +# This test is forked because it doesn't clean up after itself properly and makes +# other tests fail to resolve routes +@pytest.mark.forked @pytest.mark.skipif(DJANGO_VERSION < (2, 0), reason="Requires Django > 2.0") def test_custom_urlconf_middleware( settings, sentry_init, client, capture_events, render_span_tree @@ -1202,14 +1205,19 @@ def test_transaction_http_method_default(sentry_init, client, capture_events): By default OPTIONS and HEAD requests do not create a transaction. """ sentry_init( - integrations=[DjangoIntegration()], + integrations=[ + DjangoIntegration( + middleware_spans=False, + signals_spans=False, + ) + ], traces_sample_rate=1.0, ) events = capture_events() - client.get("/nomessage") - client.options("/nomessage") - client.head("/nomessage") + client.get(reverse("nomessage")) + client.options(reverse("nomessage")) + client.head(reverse("nomessage")) (event,) = events @@ -1225,15 +1233,17 @@ def test_transaction_http_method_custom(sentry_init, client, capture_events): "OPTIONS", "head", ), # capitalization does not matter + middleware_spans=False, + signals_spans=False, ) ], traces_sample_rate=1.0, ) events = capture_events() - client.get("/nomessage") - client.options("/nomessage") - client.head("/nomessage") + client.get(reverse("nomessage")) + client.options(reverse("nomessage")) + client.head(reverse("nomessage")) assert len(events) == 2 From 6b03581d6c6e48b392879070e9abd19d9ba4b7e4 Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Fri, 4 Oct 2024 10:47:36 +0200 Subject: [PATCH 051/244] Replace nullcontext with stdlib nullcontext --- sentry_sdk/integrations/_wsgi_common.py | 9 --------- sentry_sdk/integrations/asgi.py | 2 +- sentry_sdk/integrations/wsgi.py | 2 +- 3 files changed, 2 insertions(+), 11 deletions(-) diff --git a/sentry_sdk/integrations/_wsgi_common.py b/sentry_sdk/integrations/_wsgi_common.py index baca0f7034..072a102b7c 100644 --- a/sentry_sdk/integrations/_wsgi_common.py +++ b/sentry_sdk/integrations/_wsgi_common.py @@ -1,5 +1,4 @@ import json -from contextlib import contextmanager from copy import deepcopy import sentry_sdk @@ -16,7 +15,6 @@ if TYPE_CHECKING: from typing import Any from typing import Dict - from typing import Iterator from typing import Mapping from typing import MutableMapping from typing import Optional @@ -52,13 +50,6 @@ ) -# This noop context manager can be replaced with "from contextlib import nullcontext" when we drop Python 3.6 support -@contextmanager -def nullcontext(): - # type: () -> Iterator[None] - yield - - def request_body_within_bounds(client, content_length): # type: (Optional[sentry_sdk.client.BaseClient], int) -> bool if client is None: diff --git a/sentry_sdk/integrations/asgi.py b/sentry_sdk/integrations/asgi.py index 22e42acb6d..df8d26d833 100644 --- a/sentry_sdk/integrations/asgi.py +++ b/sentry_sdk/integrations/asgi.py @@ -6,6 +6,7 @@ import asyncio import inspect +from contextlib import nullcontext from copy import deepcopy from functools import partial @@ -19,7 +20,6 @@ ) from sentry_sdk.integrations._wsgi_common import ( DEFAULT_HTTP_METHODS_TO_CAPTURE, - nullcontext, ) from sentry_sdk.sessions import track_session from sentry_sdk.tracing import ( diff --git a/sentry_sdk/integrations/wsgi.py b/sentry_sdk/integrations/wsgi.py index f778d7f177..843cf56564 100644 --- a/sentry_sdk/integrations/wsgi.py +++ b/sentry_sdk/integrations/wsgi.py @@ -1,4 +1,5 @@ import sys +from contextlib import nullcontext from functools import partial import sentry_sdk @@ 
-8,7 +9,6 @@ from sentry_sdk.integrations._wsgi_common import ( DEFAULT_HTTP_METHODS_TO_CAPTURE, _filter_headers, - nullcontext, ) from sentry_sdk.sessions import track_session from sentry_sdk.tracing import Transaction, TRANSACTION_SOURCE_ROUTE From b65b7421f4c1df492eaee7bd54076f8fd6e10137 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Fri, 4 Oct 2024 14:56:08 +0200 Subject: [PATCH 052/244] Head SDK DSC population (#3599) * Populates a DSC with correct values when we don't have an incoming trace. * We rely on `trace_state.add` only adding new keys to the tracestate so these values will be populated in the first sampling decision on the root and just be propagated further. Note that transaction name is missing here for now and will be dealt with separately as part of the transaction name PRs. closes #3479 --- .../integrations/opentelemetry/consts.py | 4 +- .../integrations/opentelemetry/sampler.py | 53 ++++++++++----- .../integrations/opentelemetry/scope.py | 1 - .../integrations/opentelemetry/utils.py | 65 +++++++++++++++++-- sentry_sdk/tracing.py | 36 +++++++--- 5 files changed, 125 insertions(+), 34 deletions(-) diff --git a/sentry_sdk/integrations/opentelemetry/consts.py b/sentry_sdk/integrations/opentelemetry/consts.py index cb088f13a5..6409d2822d 100644 --- a/sentry_sdk/integrations/opentelemetry/consts.py +++ b/sentry_sdk/integrations/opentelemetry/consts.py @@ -1,4 +1,5 @@ from opentelemetry.context import create_key +from sentry_sdk.tracing_utils import Baggage # propagation keys @@ -11,7 +12,8 @@ SENTRY_USE_CURRENT_SCOPE_KEY = create_key("sentry_use_current_scope") SENTRY_USE_ISOLATION_SCOPE_KEY = create_key("sentry_use_isolation_scope") -SENTRY_TRACE_STATE_DROPPED = "sentry_dropped" +TRACESTATE_SAMPLED_KEY = Baggage.SENTRY_PREFIX + "sampled" +TRACESTATE_SAMPLE_RATE_KEY = Baggage.SENTRY_PREFIX + "sample_rate" OTEL_SENTRY_CONTEXT = "otel" SPAN_ORIGIN = "auto.otel" diff --git a/sentry_sdk/integrations/opentelemetry/sampler.py b/sentry_sdk/integrations/opentelemetry/sampler.py index 5fa7c9e1e8..404957f028 100644 --- a/sentry_sdk/integrations/opentelemetry/sampler.py +++ b/sentry_sdk/integrations/opentelemetry/sampler.py @@ -1,3 +1,4 @@ +from typing import cast from random import random from opentelemetry import trace @@ -6,13 +7,17 @@ from opentelemetry.trace.span import TraceState import sentry_sdk -from sentry_sdk.integrations.opentelemetry.consts import SENTRY_TRACE_STATE_DROPPED from sentry_sdk.tracing_utils import has_tracing_enabled from sentry_sdk.utils import is_valid_sample_rate, logger +from sentry_sdk.integrations.opentelemetry.consts import ( + TRACESTATE_SAMPLED_KEY, + TRACESTATE_SAMPLE_RATE_KEY, +) -from typing import TYPE_CHECKING, Optional, Sequence +from typing import TYPE_CHECKING if TYPE_CHECKING: + from typing import Optional, Sequence, Union from opentelemetry.context import Context from opentelemetry.trace import Link, SpanKind from opentelemetry.trace.span import SpanContext @@ -32,30 +37,42 @@ def get_parent_sampled(parent_context, trace_id): if parent_context.trace_flags.sampled: return True - dropped = parent_context.trace_state.get(SENTRY_TRACE_STATE_DROPPED) == "true" - if dropped: + dsc_sampled = parent_context.trace_state.get(TRACESTATE_SAMPLED_KEY) + if dsc_sampled == "true": + return True + elif dsc_sampled == "false": return False - # TODO-anton: fall back to sampling decision in DSC (for this die DSC needs to be set in the trace_state) - return None -def dropped_result(span_context): - # type: (SpanContext) -> SamplingResult - trace_state = 
span_context.trace_state.update(SENTRY_TRACE_STATE_DROPPED, "true") +def dropped_result(span_context, attributes, sample_rate=None): + # type: (SpanContext, Attributes, Optional[float]) -> SamplingResult + # note that trace_state.add will NOT overwrite existing entries + # so these will only be added the first time in a root span sampling decision + trace_state = span_context.trace_state.add(TRACESTATE_SAMPLED_KEY, "false") + if sample_rate: + trace_state = trace_state.add(TRACESTATE_SAMPLE_RATE_KEY, str(sample_rate)) return SamplingResult( Decision.DROP, + attributes=attributes, trace_state=trace_state, ) -def sampled_result(span_context): - # type: (SpanContext) -> SamplingResult +def sampled_result(span_context, attributes, sample_rate): + # type: (SpanContext, Attributes, float) -> SamplingResult + # note that trace_state.add will NOT overwrite existing entries + # so these will only be added the first time in a root span sampling decision + trace_state = span_context.trace_state.add(TRACESTATE_SAMPLED_KEY, "true").add( + TRACESTATE_SAMPLE_RATE_KEY, str(sample_rate) + ) + return SamplingResult( Decision.RECORD_AND_SAMPLE, - trace_state=span_context.trace_state, + attributes=attributes, + trace_state=trace_state, ) @@ -77,7 +94,7 @@ def should_sample( # No tracing enabled, thus no sampling if not has_tracing_enabled(client.options): - return dropped_result(parent_span_context) + return dropped_result(parent_span_context, attributes) sample_rate = None @@ -112,16 +129,16 @@ def should_sample( logger.warning( f"[Tracing] Discarding {name} because of invalid sample rate." ) - return dropped_result(parent_span_context) + return dropped_result(parent_span_context, attributes) # Roll the dice on sample rate - sampled = random() < float(sample_rate) + sample_rate = float(cast("Union[bool, float, int]", sample_rate)) + sampled = random() < sample_rate - # TODO-neel-potel set sample rate as attribute for DSC if sampled: - return sampled_result(parent_span_context) + return sampled_result(parent_span_context, attributes, sample_rate) else: - return dropped_result(parent_span_context) + return dropped_result(parent_span_context, attributes, sample_rate) def get_description(self) -> str: return self.__class__.__name__ diff --git a/sentry_sdk/integrations/opentelemetry/scope.py b/sentry_sdk/integrations/opentelemetry/scope.py index fc76c4ffdb..11714fda53 100644 --- a/sentry_sdk/integrations/opentelemetry/scope.py +++ b/sentry_sdk/integrations/opentelemetry/scope.py @@ -8,7 +8,6 @@ SpanContext, NonRecordingSpan, TraceFlags, - TraceState, use_span, ) diff --git a/sentry_sdk/integrations/opentelemetry/utils.py b/sentry_sdk/integrations/opentelemetry/utils.py index 982ecb4509..fcfec97f5c 100644 --- a/sentry_sdk/integrations/opentelemetry/utils.py +++ b/sentry_sdk/integrations/opentelemetry/utils.py @@ -15,6 +15,7 @@ from opentelemetry.semconv.trace import SpanAttributes from opentelemetry.sdk.trace import ReadableSpan +import sentry_sdk from sentry_sdk.utils import Dsn from sentry_sdk.consts import SPANSTATUS from sentry_sdk.tracing import get_span_status_from_http_code, DEFAULT_SPAN_ORIGIN @@ -24,7 +25,7 @@ from sentry_sdk._types import TYPE_CHECKING if TYPE_CHECKING: - from typing import Any, Optional, Mapping, Sequence, Union, ItemsView + from typing import Any, Optional, Mapping, Sequence, Union from sentry_sdk._types import OtelExtractedSpanData @@ -300,9 +301,8 @@ def get_trace_context(span, span_data=None): if span.attributes: trace_context["data"] = dict(span.attributes) - 
trace_context["dynamic_sampling_context"] = dsc_from_trace_state( - span.context.trace_state - ) + trace_state = get_trace_state(span) + trace_context["dynamic_sampling_context"] = dsc_from_trace_state(trace_state) # TODO-neel-potel profiler thread_id, thread_name @@ -319,6 +319,11 @@ def trace_state_from_baggage(baggage): return TraceState(items) +def baggage_from_trace_state(trace_state): + # type: (TraceState) -> Baggage + return Baggage(dsc_from_trace_state(trace_state)) + + def serialize_trace_state(trace_state): # type: (TraceState) -> str sentry_items = [] @@ -336,3 +341,55 @@ def dsc_from_trace_state(trace_state): key = re.sub(Baggage.SENTRY_PREFIX_REGEX, "", k) dsc[key] = v return dsc + + +def has_incoming_trace(trace_state): + # type: (TraceState) -> bool + """ + The existence a sentry-trace_id in the baggage implies we continued an upstream trace. + """ + return (Baggage.SENTRY_PREFIX + "trace_id") in trace_state + + +def get_trace_state(span): + # type: (Union[Span, ReadableSpan]) -> TraceState + """ + Get the existing trace_state with sentry items + or populate it if we are the head SDK. + """ + span_context = span.get_span_context() + if not span_context: + return TraceState() + + trace_state = span_context.trace_state + + if has_incoming_trace(trace_state): + return trace_state + else: + client = sentry_sdk.get_client() + if not client.is_active(): + return trace_state + + options = client.options or {} + + trace_state = trace_state.update( + Baggage.SENTRY_PREFIX + "trace_id", format_trace_id(span_context.trace_id) + ) + + if options.get("environment"): + trace_state = trace_state.update( + Baggage.SENTRY_PREFIX + "environment", options["environment"] + ) + + if options.get("release"): + trace_state = trace_state.update( + Baggage.SENTRY_PREFIX + "release", options["release"] + ) + + if options.get("dsn"): + trace_state = trace_state.update( + Baggage.SENTRY_PREFIX + "public_key", Dsn(options["dsn"]).public_key + ) + + # TODO-neel-potel head dsc transaction name + return trace_state diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 46c19996bd..1c3cb5b3f0 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -5,7 +5,12 @@ from datetime import datetime, timedelta, timezone from opentelemetry import trace as otel_trace, context -from opentelemetry.trace import format_trace_id, format_span_id, Span as OtelSpan +from opentelemetry.trace import ( + format_trace_id, + format_span_id, + Span as OtelSpan, + TraceState, +) from opentelemetry.trace.status import StatusCode from opentelemetry.sdk.trace import ReadableSpan @@ -1453,8 +1458,7 @@ def iter_headers(self): serialize_trace_state, ) - trace_state = self._otel_span.get_span_context().trace_state - yield BAGGAGE_HEADER_NAME, serialize_trace_state(trace_state) + yield BAGGAGE_HEADER_NAME, serialize_trace_state(self.trace_state) def to_traceparent(self): # type: () -> str @@ -1471,10 +1475,26 @@ def to_traceparent(self): return traceparent + @property + def trace_state(self): + # type: () -> TraceState + from sentry_sdk.integrations.opentelemetry.utils import ( + get_trace_state, + ) + + return get_trace_state(self._otel_span) + def to_baggage(self): - # type: () -> Optional[Baggage] - # TODO-neel-potel head SDK populate baggage mess - pass + # type: () -> Baggage + return self.get_baggage() + + def get_baggage(self): + # type: () -> Baggage + from sentry_sdk.integrations.opentelemetry.utils import ( + baggage_from_trace_state, + ) + + return baggage_from_trace_state(self.trace_state) def set_tag(self, 
key, value): # type: (str, Any) -> None @@ -1568,10 +1588,6 @@ def set_context(self, key, value): # type: (str, Any) -> None pass - def get_baggage(self): - # type: () -> Baggage - pass - if TYPE_CHECKING: From f0703b360a0a98fbeaa53fec41577be7a1f0f4df Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Tue, 8 Oct 2024 14:56:09 +0200 Subject: [PATCH 053/244] Set correct span op (#3628) Set the correct span `op` for database spans, instead of the generic `"db"`. --- sentry_sdk/integrations/opentelemetry/utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sentry_sdk/integrations/opentelemetry/utils.py b/sentry_sdk/integrations/opentelemetry/utils.py index fcfec97f5c..89dd3cf1d2 100644 --- a/sentry_sdk/integrations/opentelemetry/utils.py +++ b/sentry_sdk/integrations/opentelemetry/utils.py @@ -17,7 +17,7 @@ import sentry_sdk from sentry_sdk.utils import Dsn -from sentry_sdk.consts import SPANSTATUS +from sentry_sdk.consts import SPANSTATUS, OP from sentry_sdk.tracing import get_span_status_from_http_code, DEFAULT_SPAN_ORIGIN from sentry_sdk.tracing_utils import Baggage from sentry_sdk.integrations.opentelemetry.consts import SentrySpanAttribute @@ -182,7 +182,7 @@ def span_data_for_db_query(span): # type: (ReadableSpan) -> OtelExtractedSpanData span_attributes = span.attributes or {} - op = "db" + op = span_attributes.get(SentrySpanAttribute.OP, OP.DB) statement = span_attributes.get(SpanAttributes.DB_STATEMENT, None) statement = cast("Optional[str]", statement) From 58cd8aaf3f1875af05e57c93858b4150819fadca Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Tue, 8 Oct 2024 14:58:51 +0200 Subject: [PATCH 054/244] Implement set_contexts and fix response context (#3630) --- sentry_sdk/integrations/opentelemetry/consts.py | 1 + .../integrations/opentelemetry/potel_span_processor.py | 5 ++++- sentry_sdk/integrations/opentelemetry/utils.py | 3 +++ sentry_sdk/tracing.py | 8 +++++--- 4 files changed, 13 insertions(+), 4 deletions(-) diff --git a/sentry_sdk/integrations/opentelemetry/consts.py b/sentry_sdk/integrations/opentelemetry/consts.py index 6409d2822d..a71e304cf5 100644 --- a/sentry_sdk/integrations/opentelemetry/consts.py +++ b/sentry_sdk/integrations/opentelemetry/consts.py @@ -28,3 +28,4 @@ class SentrySpanAttribute: MEASUREMENT = "sentry.measurement" TAG = "sentry.tag" NAME = "sentry.name" + CONTEXT = "sentry.context" diff --git a/sentry_sdk/integrations/opentelemetry/potel_span_processor.py b/sentry_sdk/integrations/opentelemetry/potel_span_processor.py index 63a9acb9db..0076743245 100644 --- a/sentry_sdk/integrations/opentelemetry/potel_span_processor.py +++ b/sentry_sdk/integrations/opentelemetry/potel_span_processor.py @@ -138,11 +138,14 @@ def _root_span_to_transaction_event(self, span): return None span_data = extract_span_data(span) - (_, description, _, _, _) = span_data + (_, description, _, http_status, _) = span_data trace_context = get_trace_context(span, span_data=span_data) contexts = {"trace": trace_context} + if http_status: + contexts["response"] = {"status_code": http_status} + if span.resource.attributes: contexts[OTEL_SENTRY_CONTEXT] = {"resource": dict(span.resource.attributes)} diff --git a/sentry_sdk/integrations/opentelemetry/utils.py b/sentry_sdk/integrations/opentelemetry/utils.py index 89dd3cf1d2..ddc9f0c25a 100644 --- a/sentry_sdk/integrations/opentelemetry/utils.py +++ b/sentry_sdk/integrations/opentelemetry/utils.py @@ -208,6 +208,9 @@ def extract_span_status(span): if inferred_status: return (inferred_status, http_status) + if http_status 
is not None: + return (inferred_status, http_status) + if ( status.description is not None and status.description in GRPC_ERROR_MAP.values() diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 1c3cb5b3f0..1ca79f6c14 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -1504,6 +1504,7 @@ def set_tag(self, key, value): def set_data(self, key, value): # type: (str, Any) -> None + # TODO-neel-potel we cannot add dicts here self.set_attribute(key, value) def set_attribute(self, key, value): @@ -1582,11 +1583,12 @@ def get_profile_context(self): # type: () -> Optional[ProfileContext] pass - # transaction/root span methods - def set_context(self, key, value): # type: (str, Any) -> None - pass + from sentry_sdk.integrations.opentelemetry.consts import SentrySpanAttribute + # TODO-neel-potel we cannot add dicts here + + self.set_attribute(f"{SentrySpanAttribute.CONTEXT}.{key}", value) if TYPE_CHECKING: From 4b2b6dbe470b63f519f0d9e4108a4313b78cdb6b Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Tue, 8 Oct 2024 15:02:16 +0200 Subject: [PATCH 055/244] Fix redis pipeline commands, because otel attributes do not allow dicts (#3631) Otel `span.set_attribute()` does not allow `dict` values, thus we change the data to be compatible --- MIGRATION_GUIDE.md | 1 + sentry_sdk/integrations/redis/utils.py | 9 ++------- .../redis/asyncio/test_redis_asyncio.py | 6 ++---- .../redis/cluster/test_redis_cluster.py | 6 ++---- .../cluster_asyncio/test_redis_cluster_asyncio.py | 6 ++---- tests/integrations/redis/test_redis.py | 6 ++---- .../test_redis_py_cluster_legacy.py | 13 +++++-------- 7 files changed, 16 insertions(+), 31 deletions(-) diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index 2a20350121..e90f63cbaf 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -17,6 +17,7 @@ Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of wh - The `Profile()` constructor does not accept a `hub` parameter anymore. - A `Profile` object does not have a `.hub` property anymore. - `sentry_sdk.continue_trace` no longer returns a `Transaction` and is now a context manager. +- Redis integration: In Redis pipeline spans there is no `span["data"]["redis.commands"]` that contains a dict `{"count": 3, "first_ten": ["cmd1", "cmd2", ...]}` but instead `span["data"]["redis.commands.count"]` (containing `3`) and `span["data"]["redis.commands.first_ten"]` (containing `["cmd1", "cmd2", ...]`). 
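+  As a concrete sketch (the command values are illustrative, taken from the test updates below), an assertion that previously read
+
+      assert span["data"]["redis.commands"] == {"count": 1, "first_ten": ["GET 'foo'"]}
+
+  is now written against the flattened keys:
+
+      assert span["data"]["redis.commands.count"] == 1
+      assert span["data"]["redis.commands.first_ten"] == ["GET 'foo'"]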
### Removed diff --git a/sentry_sdk/integrations/redis/utils.py b/sentry_sdk/integrations/redis/utils.py index 27fae1e8ca..894c56305b 100644 --- a/sentry_sdk/integrations/redis/utils.py +++ b/sentry_sdk/integrations/redis/utils.py @@ -120,13 +120,8 @@ def _set_pipeline_data( command = get_command_args_fn(arg) commands.append(_get_safe_command(command[0], command[1:])) - span.set_data( - "redis.commands", - { - "count": len(command_stack), - "first_ten": commands, - }, - ) + span.set_data("redis.commands.count", len(command_stack)) + span.set_data("redis.commands.first_ten", commands) def _set_client_data(span, is_cluster, name, *args): diff --git a/tests/integrations/redis/asyncio/test_redis_asyncio.py b/tests/integrations/redis/asyncio/test_redis_asyncio.py index 17130b337b..2a0b96b021 100644 --- a/tests/integrations/redis/asyncio/test_redis_asyncio.py +++ b/tests/integrations/redis/asyncio/test_redis_asyncio.py @@ -65,12 +65,10 @@ async def test_async_redis_pipeline( (span,) = event["spans"] assert span["op"] == "db.redis" assert span["description"] == "redis.pipeline.execute" + assert span["data"]["redis.commands.count"] == 3 + assert span["data"]["redis.commands.first_ten"] == expected_first_ten assert span["data"] == ApproxDict( { - "redis.commands": { - "count": 3, - "first_ten": expected_first_ten, - }, SPANDATA.DB_SYSTEM: "redis", SPANDATA.DB_NAME: "0", SPANDATA.SERVER_ADDRESS: connection.connection_pool.connection_kwargs.get( diff --git a/tests/integrations/redis/cluster/test_redis_cluster.py b/tests/integrations/redis/cluster/test_redis_cluster.py index 83d1b45cc9..26feee1dae 100644 --- a/tests/integrations/redis/cluster/test_redis_cluster.py +++ b/tests/integrations/redis/cluster/test_redis_cluster.py @@ -128,12 +128,10 @@ def test_rediscluster_pipeline( (span,) = event["spans"] assert span["op"] == "db.redis" assert span["description"] == "redis.pipeline.execute" + assert span["data"]["redis.commands.count"] == 3 + assert span["data"]["redis.commands.first_ten"] == expected_first_ten assert span["data"] == ApproxDict( { - "redis.commands": { - "count": 3, - "first_ten": expected_first_ten, - }, SPANDATA.DB_SYSTEM: "redis", # ClusterNode converts localhost to 127.0.0.1 SPANDATA.SERVER_ADDRESS: "127.0.0.1", diff --git a/tests/integrations/redis/cluster_asyncio/test_redis_cluster_asyncio.py b/tests/integrations/redis/cluster_asyncio/test_redis_cluster_asyncio.py index 993a2962ca..b11808fb50 100644 --- a/tests/integrations/redis/cluster_asyncio/test_redis_cluster_asyncio.py +++ b/tests/integrations/redis/cluster_asyncio/test_redis_cluster_asyncio.py @@ -131,12 +131,10 @@ async def test_async_redis_pipeline( (span,) = event["spans"] assert span["op"] == "db.redis" assert span["description"] == "redis.pipeline.execute" + assert span["data"]["redis.commands.count"] == 3 + assert span["data"]["redis.commands.first_ten"] == expected_first_ten assert span["data"] == ApproxDict( { - "redis.commands": { - "count": 3, - "first_ten": expected_first_ten, - }, SPANDATA.DB_SYSTEM: "redis", # ClusterNode converts localhost to 127.0.0.1 SPANDATA.SERVER_ADDRESS: "127.0.0.1", diff --git a/tests/integrations/redis/test_redis.py b/tests/integrations/redis/test_redis.py index 5173885f33..aff5b3ec9b 100644 --- a/tests/integrations/redis/test_redis.py +++ b/tests/integrations/redis/test_redis.py @@ -72,10 +72,8 @@ def test_redis_pipeline( assert span["op"] == "db.redis" assert span["description"] == "redis.pipeline.execute" assert span["data"][SPANDATA.DB_SYSTEM] == "redis" - assert 
span["data"]["redis.commands"] == { - "count": 3, - "first_ten": expected_first_ten, - } + assert span["data"]["redis.commands.count"] == 3 + assert span["data"]["redis.commands.first_ten"] == expected_first_ten assert span["tags"] == { "redis.transaction": is_transaction, "redis.is_cluster": False, diff --git a/tests/integrations/redis_py_cluster_legacy/test_redis_py_cluster_legacy.py b/tests/integrations/redis_py_cluster_legacy/test_redis_py_cluster_legacy.py index 36a27d569d..5e0b724436 100644 --- a/tests/integrations/redis_py_cluster_legacy/test_redis_py_cluster_legacy.py +++ b/tests/integrations/redis_py_cluster_legacy/test_redis_py_cluster_legacy.py @@ -95,12 +95,10 @@ def test_rediscluster_pipeline( (span,) = event["spans"] assert span["op"] == "db.redis" assert span["description"] == "redis.pipeline.execute" + assert span["data"]["redis.commands.count"] == 3 + assert span["data"]["redis.commands.first_ten"] == expected_first_ten assert span["data"] == ApproxDict( { - "redis.commands": { - "count": 3, - "first_ten": expected_first_ten, - }, SPANDATA.DB_SYSTEM: "redis", SPANDATA.DB_NAME: "1", SPANDATA.SERVER_ADDRESS: "localhost", @@ -158,12 +156,11 @@ def test_db_connection_attributes_pipeline( (span,) = event["spans"] assert span["op"] == "db.redis" assert span["description"] == "redis.pipeline.execute" + assert span["data"]["redis.commands.count"] == 1 + assert span["data"]["redis.commands.first_ten"] == ["GET 'foo'"] + assert span["data"] == ApproxDict( { - "redis.commands": { - "count": 1, - "first_ten": ["GET 'foo'"], - }, SPANDATA.DB_SYSTEM: "redis", SPANDATA.DB_NAME: "1", SPANDATA.SERVER_ADDRESS: "localhost", From 74cb7dbc380c802f0ab91fbf205a755ae7e9eab8 Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Wed, 9 Oct 2024 13:32:08 +0200 Subject: [PATCH 056/244] make black happy --- sentry_sdk/tracing.py | 1 + 1 file changed, 1 insertion(+) diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 1ca79f6c14..2e560ff3d7 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -1586,6 +1586,7 @@ def get_profile_context(self): def set_context(self, key, value): # type: (str, Any) -> None from sentry_sdk.integrations.opentelemetry.consts import SentrySpanAttribute + # TODO-neel-potel we cannot add dicts here self.set_attribute(f"{SentrySpanAttribute.CONTEXT}.{key}", value) From c6ad235b48f3ffd5d762321341c14967187af254 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Thu, 10 Oct 2024 15:36:27 +0200 Subject: [PATCH 057/244] Set internal_error span status on span context manager exit (#3642) --- sentry_sdk/tracing.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 2e560ff3d7..112ad2bdc4 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -1273,8 +1273,10 @@ def __enter__(self): def __exit__(self, ty, value, tb): # type: (Optional[Any], Optional[Any], Optional[Any]) -> None + if value is not None: + self.set_status(SPANSTATUS.INTERNAL_ERROR) + self.finish() - # XXX set status to error if unset and an exception occurred? 
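+        # Rough usage sketch (assuming the top-level sentry_sdk.start_span API):
+        # an exception escaping the `with` block now marks the span as
+        # internal_error before it is finished and the context token is detached:
+        #
+        #     with sentry_sdk.start_span(op="task", name="work") as span:
+        #         raise ValueError("boom")  # status becomes internal_error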
context.detach(self._ctx_token) def _get_attribute(self, name): From 56f2113cee285dc7f7c62785ab885da7da280a89 Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Thu, 10 Oct 2024 15:40:23 +0200 Subject: [PATCH 058/244] Remove deprecated span description (#3640) --- MIGRATION_GUIDE.md | 1 + sentry_sdk/integrations/opentelemetry/scope.py | 9 --------- 2 files changed, 1 insertion(+), 9 deletions(-) diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index e90f63cbaf..0975b0406c 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -21,6 +21,7 @@ Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of wh ### Removed +- Spans no longer have a `description`. Use `name` instead. - Dropped support for Python 3.6. - `sentry_sdk.metrics` and associated metrics APIs have been removed as Sentry no longer accepts metrics data in this form. See https://sentry.zendesk.com/hc/en-us/articles/26369339769883-Upcoming-API-Changes-to-Metrics - The experimental options `enable_metrics`, `before_emit_metric` and `metric_code_locations` have been removed. diff --git a/sentry_sdk/integrations/opentelemetry/scope.py b/sentry_sdk/integrations/opentelemetry/scope.py index 11714fda53..0c0087dae1 100644 --- a/sentry_sdk/integrations/opentelemetry/scope.py +++ b/sentry_sdk/integrations/opentelemetry/scope.py @@ -1,5 +1,3 @@ -import warnings - from typing import cast from contextlib import contextmanager @@ -125,13 +123,6 @@ def start_transaction(self, custom_sampling_context=None, **kwargs): def start_span(self, custom_sampling_context=None, **kwargs): # type: (Optional[SamplingContext], Any) -> POTelSpan - if kwargs.get("description") is not None: - warnings.warn( - "The `description` parameter is deprecated. Please use `name` instead.", - DeprecationWarning, - stacklevel=2, - ) - return POTelSpan(**kwargs, scope=self) From a67125ae6cd25f3674e0d7ad950a1e0662ac302f Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Thu, 10 Oct 2024 15:40:50 +0200 Subject: [PATCH 059/244] Fix clickhouse-driver integration spans (#3638) --- MIGRATION_GUIDE.md | 1 + sentry_sdk/integrations/clickhouse_driver.py | 50 ++++-- sentry_sdk/tracing.py | 20 +-- .../test_clickhouse_driver.py | 146 ++++++++++++------ 4 files changed, 149 insertions(+), 68 deletions(-) diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index 0975b0406c..e64d13da4e 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -18,6 +18,7 @@ Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of wh - A `Profile` object does not have a `.hub` property anymore. - `sentry_sdk.continue_trace` no longer returns a `Transaction` and is now a context manager. - Redis integration: In Redis pipeline spans there is no `span["data"]["redis.commands"]` that contains a dict `{"count": 3, "first_ten": ["cmd1", "cmd2", ...]}` but instead `span["data"]["redis.commands.count"]` (containing `3`) and `span["data"]["redis.commands.first_ten"]` (containing `["cmd1", "cmd2", ...]`). +- clickhouse-driver integration: The query is now available under the `db.query.text` span attribute (only if `send_default_pii` is `True`). 
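+  A sketch of reading the captured span data under the new keys, assuming `send_default_pii=True` was passed to `sentry_sdk.init`:
+
+      query = span["data"]["db.query.text"]  # previously span["data"]["query"]
+      params = span["data"]["db.params"]     # now serialized to a string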
### Removed diff --git a/sentry_sdk/integrations/clickhouse_driver.py b/sentry_sdk/integrations/clickhouse_driver.py index daf4c2257c..4ee18d0e54 100644 --- a/sentry_sdk/integrations/clickhouse_driver.py +++ b/sentry_sdk/integrations/clickhouse_driver.py @@ -91,13 +91,15 @@ def _inner(*args: P.args, **kwargs: P.kwargs) -> T: _set_db_data(span, connection) - span.set_data("query", query) + if should_send_default_pii(): + span.set_attribute("db.query.text", query) if query_id: - span.set_data("db.query_id", query_id) + span.set_attribute("db.query_id", query_id) if params and should_send_default_pii(): - span.set_data("db.params", params) + connection._sentry_db_params = params + span.set_attribute("db.params", str(params)) # run the original code ret = f(*args, **kwargs) @@ -115,12 +117,26 @@ def _inner_end(*args: P.args, **kwargs: P.kwargs) -> T: if span is not None: if res is not None and should_send_default_pii(): - span.set_data("db.result", res) + span.set_attribute("db.result", str(res)) with capture_internal_exceptions(): - span.scope.add_breadcrumb( - message=span._data.pop("query"), category="query", data=span._data - ) + query = span.get_attribute("db.query.text") + if query: + data = {} + for attr in ( + "db.params", + "db.result", + SPANDATA.DB_SYSTEM, + SPANDATA.DB_USER, + SPANDATA.SERVER_ADDRESS, + SPANDATA.SERVER_PORT, + ): + if span.get_attribute(attr): + data[attr] = span.get_attribute(attr) + + sentry_sdk.add_breadcrumb( + message=query, category="query", data=data + ) span.finish() @@ -139,9 +155,15 @@ def _inner_send_data(*args: P.args, **kwargs: P.kwargs) -> T: _set_db_data(span, instance.connection) if should_send_default_pii(): - db_params = span._data.get("db.params", []) + db_params = ( + getattr(instance.connection, "_sentry_db_params", None) or [] + ) db_params.extend(data) - span.set_data("db.params", db_params) + span.set_attribute("db.params", str(db_params)) + try: + del instance.connection._sentry_db_params + except AttributeError: + pass return f(*args, **kwargs) @@ -151,8 +173,8 @@ def _inner_send_data(*args: P.args, **kwargs: P.kwargs) -> T: def _set_db_data( span: Span, connection: clickhouse_driver.connection.Connection ) -> None: - span.set_data(SPANDATA.DB_SYSTEM, "clickhouse") - span.set_data(SPANDATA.SERVER_ADDRESS, connection.host) - span.set_data(SPANDATA.SERVER_PORT, connection.port) - span.set_data(SPANDATA.DB_NAME, connection.database) - span.set_data(SPANDATA.DB_USER, connection.user) + span.set_attribute(SPANDATA.DB_SYSTEM, "clickhouse") + span.set_attribute(SPANDATA.SERVER_ADDRESS, connection.host) + span.set_attribute(SPANDATA.SERVER_PORT, connection.port) + span.set_attribute(SPANDATA.DB_NAME, connection.database) + span.set_attribute(SPANDATA.DB_USER, connection.user) diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 112ad2bdc4..5b0795af62 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -1279,18 +1279,12 @@ def __exit__(self, ty, value, tb): self.finish() context.detach(self._ctx_token) - def _get_attribute(self, name): - # type: (str) -> Optional[Any] - if not isinstance(self._otel_span, ReadableSpan): - return None - return self._otel_span.attributes.get(name) - @property def description(self): # type: () -> Optional[str] from sentry_sdk.integrations.opentelemetry.consts import SentrySpanAttribute - return self._get_attribute(SentrySpanAttribute.DESCRIPTION) + return self.get_attribute(SentrySpanAttribute.DESCRIPTION) @description.setter def description(self, value): @@ -1305,7 +1299,7 @@ def 
origin(self): # type: () -> Optional[str] from sentry_sdk.integrations.opentelemetry.consts import SentrySpanAttribute - return self._get_attribute(SentrySpanAttribute.ORIGIN) + return self.get_attribute(SentrySpanAttribute.ORIGIN) @origin.setter def origin(self, value): @@ -1378,7 +1372,7 @@ def op(self): # type: () -> Optional[str] from sentry_sdk.integrations.opentelemetry.consts import SentrySpanAttribute - return self._get_attribute(SentrySpanAttribute.OP) + return self.get_attribute(SentrySpanAttribute.OP) @op.setter def op(self, value): @@ -1393,7 +1387,7 @@ def name(self): # type: () -> Optional[str] from sentry_sdk.integrations.opentelemetry.consts import SentrySpanAttribute - return self._get_attribute(SentrySpanAttribute.NAME) + return self.get_attribute(SentrySpanAttribute.NAME) @name.setter def name(self, value): @@ -1509,6 +1503,12 @@ def set_data(self, key, value): # TODO-neel-potel we cannot add dicts here self.set_attribute(key, value) + def get_attribute(self, name): + # type: (str) -> Optional[Any] + if not isinstance(self._otel_span, ReadableSpan): + return None + return self._otel_span.attributes.get(name) + def set_attribute(self, key, value): # type: (str, Any) -> None self._otel_span.set_attribute(key, value) diff --git a/tests/integrations/clickhouse_driver/test_clickhouse_driver.py b/tests/integrations/clickhouse_driver/test_clickhouse_driver.py index 3b07a82f03..6378919b06 100644 --- a/tests/integrations/clickhouse_driver/test_clickhouse_driver.py +++ b/tests/integrations/clickhouse_driver/test_clickhouse_driver.py @@ -16,6 +16,8 @@ if clickhouse_driver.VERSION < (0, 2, 6): EXPECT_PARAMS_IN_SELECT = False +PARAMS_SERIALIZER = str + def test_clickhouse_client_breadcrumbs(sentry_init, capture_events) -> None: sentry_init( @@ -142,7 +144,7 @@ def test_clickhouse_client_breadcrumbs_with_pii(sentry_init, capture_events) -> "db.user": "default", "server.address": "localhost", "server.port": 9000, - "db.result": [], + "db.result": str([]), }, "message": "DROP TABLE IF EXISTS test", "type": "default", @@ -155,7 +157,7 @@ def test_clickhouse_client_breadcrumbs_with_pii(sentry_init, capture_events) -> "db.user": "default", "server.address": "localhost", "server.port": 9000, - "db.result": [], + "db.result": str([]), }, "message": "CREATE TABLE test (x Int32) ENGINE = Memory", "type": "default", @@ -168,7 +170,7 @@ def test_clickhouse_client_breadcrumbs_with_pii(sentry_init, capture_events) -> "db.user": "default", "server.address": "localhost", "server.port": 9000, - "db.params": [{"x": 100}], + "db.params": PARAMS_SERIALIZER([{"x": 100}]), }, "message": "INSERT INTO test (x) VALUES", "type": "default", @@ -181,7 +183,7 @@ def test_clickhouse_client_breadcrumbs_with_pii(sentry_init, capture_events) -> "db.user": "default", "server.address": "localhost", "server.port": 9000, - "db.params": [[170], [200]], + "db.params": PARAMS_SERIALIZER([[170], [200]]), }, "message": "INSERT INTO test (x) VALUES", "type": "default", @@ -194,8 +196,8 @@ def test_clickhouse_client_breadcrumbs_with_pii(sentry_init, capture_events) -> "db.user": "default", "server.address": "localhost", "server.port": 9000, - "db.result": [[370]], - "db.params": {"minv": 150}, + "db.result": str([[370]]), + "db.params": PARAMS_SERIALIZER({"minv": 150}), }, "message": "SELECT sum(x) FROM test WHERE x > 150", "type": "default", @@ -250,13 +252,15 @@ def test_clickhouse_client_spans( "origin": "auto.db.clickhouse_driver", "description": "DROP TABLE IF EXISTS test", "data": { + "sentry.name": "DROP TABLE IF EXISTS 
test", + "sentry.origin": "auto.db.clickhouse_driver", + "sentry.op": "db", "db.system": "clickhouse", "db.name": "", "db.user": "default", "server.address": "localhost", "server.port": 9000, }, - "same_process_as_parent": True, "trace_id": transaction_trace_id, "parent_span_id": transaction_span_id, }, @@ -265,13 +269,15 @@ def test_clickhouse_client_spans( "origin": "auto.db.clickhouse_driver", "description": "CREATE TABLE test (x Int32) ENGINE = Memory", "data": { + "sentry.name": "CREATE TABLE test (x Int32) ENGINE = Memory", + "sentry.origin": "auto.db.clickhouse_driver", + "sentry.op": "db", "db.system": "clickhouse", "db.name": "", "db.user": "default", "server.address": "localhost", "server.port": 9000, }, - "same_process_as_parent": True, "trace_id": transaction_trace_id, "parent_span_id": transaction_span_id, }, @@ -280,13 +286,15 @@ def test_clickhouse_client_spans( "origin": "auto.db.clickhouse_driver", "description": "INSERT INTO test (x) VALUES", "data": { + "sentry.name": "INSERT INTO test (x) VALUES", + "sentry.origin": "auto.db.clickhouse_driver", + "sentry.op": "db", "db.system": "clickhouse", "db.name": "", "db.user": "default", "server.address": "localhost", "server.port": 9000, }, - "same_process_as_parent": True, "trace_id": transaction_trace_id, "parent_span_id": transaction_span_id, }, @@ -295,13 +303,15 @@ def test_clickhouse_client_spans( "origin": "auto.db.clickhouse_driver", "description": "INSERT INTO test (x) VALUES", "data": { + "sentry.name": "INSERT INTO test (x) VALUES", + "sentry.origin": "auto.db.clickhouse_driver", + "sentry.op": "db", "db.system": "clickhouse", "db.name": "", "db.user": "default", "server.address": "localhost", "server.port": 9000, }, - "same_process_as_parent": True, "trace_id": transaction_trace_id, "parent_span_id": transaction_span_id, }, @@ -310,13 +320,15 @@ def test_clickhouse_client_spans( "origin": "auto.db.clickhouse_driver", "description": "SELECT sum(x) FROM test WHERE x > 150", "data": { + "sentry.name": "SELECT sum(x) FROM test WHERE x > 150", + "sentry.origin": "auto.db.clickhouse_driver", + "sentry.op": "db", "db.system": "clickhouse", "db.name": "", "db.user": "default", "server.address": "localhost", "server.port": 9000, }, - "same_process_as_parent": True, "trace_id": transaction_trace_id, "parent_span_id": transaction_span_id, }, @@ -332,6 +344,8 @@ def test_clickhouse_client_spans( span.pop("span_id", None) span.pop("start_timestamp", None) span.pop("timestamp", None) + span.pop("same_process_as_parent", None) + span.pop("status", None) assert event["spans"] == expected_spans @@ -373,14 +387,17 @@ def test_clickhouse_client_spans_with_pii( "origin": "auto.db.clickhouse_driver", "description": "DROP TABLE IF EXISTS test", "data": { + "sentry.name": "DROP TABLE IF EXISTS test", + "sentry.origin": "auto.db.clickhouse_driver", + "sentry.op": "db", "db.system": "clickhouse", "db.name": "", "db.user": "default", "server.address": "localhost", "server.port": 9000, - "db.result": [], + "db.query.text": "DROP TABLE IF EXISTS test", + "db.result": str([]), }, - "same_process_as_parent": True, "trace_id": transaction_trace_id, "parent_span_id": transaction_span_id, }, @@ -389,14 +406,17 @@ def test_clickhouse_client_spans_with_pii( "origin": "auto.db.clickhouse_driver", "description": "CREATE TABLE test (x Int32) ENGINE = Memory", "data": { + "sentry.name": "CREATE TABLE test (x Int32) ENGINE = Memory", + "sentry.origin": "auto.db.clickhouse_driver", + "sentry.op": "db", "db.system": "clickhouse", "db.name": "", "db.user": 
"default", + "db.query.text": "CREATE TABLE test (x Int32) ENGINE = Memory", + "db.result": str([]), "server.address": "localhost", "server.port": 9000, - "db.result": [], }, - "same_process_as_parent": True, "trace_id": transaction_trace_id, "parent_span_id": transaction_span_id, }, @@ -405,14 +425,17 @@ def test_clickhouse_client_spans_with_pii( "origin": "auto.db.clickhouse_driver", "description": "INSERT INTO test (x) VALUES", "data": { + "sentry.name": "INSERT INTO test (x) VALUES", + "sentry.origin": "auto.db.clickhouse_driver", + "sentry.op": "db", "db.system": "clickhouse", "db.name": "", "db.user": "default", + "db.query.text": "INSERT INTO test (x) VALUES", + "db.params": PARAMS_SERIALIZER([{"x": 100}]), "server.address": "localhost", "server.port": 9000, - "db.params": [{"x": 100}], }, - "same_process_as_parent": True, "trace_id": transaction_trace_id, "parent_span_id": transaction_span_id, }, @@ -421,14 +444,16 @@ def test_clickhouse_client_spans_with_pii( "origin": "auto.db.clickhouse_driver", "description": "INSERT INTO test (x) VALUES", "data": { + "sentry.name": "INSERT INTO test (x) VALUES", + "sentry.origin": "auto.db.clickhouse_driver", + "sentry.op": "db", "db.system": "clickhouse", "db.name": "", "db.user": "default", + "db.query.text": "INSERT INTO test (x) VALUES", "server.address": "localhost", "server.port": 9000, - "db.params": [[170], [200]], }, - "same_process_as_parent": True, "trace_id": transaction_trace_id, "parent_span_id": transaction_span_id, }, @@ -437,15 +462,18 @@ def test_clickhouse_client_spans_with_pii( "origin": "auto.db.clickhouse_driver", "description": "SELECT sum(x) FROM test WHERE x > 150", "data": { + "sentry.name": "SELECT sum(x) FROM test WHERE x > 150", + "sentry.origin": "auto.db.clickhouse_driver", + "sentry.op": "db", "db.system": "clickhouse", "db.name": "", "db.user": "default", + "db.params": PARAMS_SERIALIZER({"minv": 150}), + "db.query.text": "SELECT sum(x) FROM test WHERE x > 150", + "db.result": str([(370,)]), "server.address": "localhost", "server.port": 9000, - "db.params": {"minv": 150}, - "db.result": [[370]], }, - "same_process_as_parent": True, "trace_id": transaction_trace_id, "parent_span_id": transaction_span_id, }, @@ -461,6 +489,8 @@ def test_clickhouse_client_spans_with_pii( span.pop("span_id", None) span.pop("start_timestamp", None) span.pop("timestamp", None) + span.pop("same_process_as_parent", None) + span.pop("status", None) assert event["spans"] == expected_spans @@ -592,7 +622,7 @@ def test_clickhouse_dbapi_breadcrumbs_with_pii(sentry_init, capture_events) -> N "db.user": "default", "server.address": "localhost", "server.port": 9000, - "db.result": [[], []], + "db.result": str([[], []]), }, "message": "DROP TABLE IF EXISTS test", "type": "default", @@ -605,7 +635,7 @@ def test_clickhouse_dbapi_breadcrumbs_with_pii(sentry_init, capture_events) -> N "db.user": "default", "server.address": "localhost", "server.port": 9000, - "db.result": [[], []], + "db.result": str([[], []]), }, "message": "CREATE TABLE test (x Int32) ENGINE = Memory", "type": "default", @@ -618,7 +648,7 @@ def test_clickhouse_dbapi_breadcrumbs_with_pii(sentry_init, capture_events) -> N "db.user": "default", "server.address": "localhost", "server.port": 9000, - "db.params": [{"x": 100}], + "db.params": PARAMS_SERIALIZER([{"x": 100}]), }, "message": "INSERT INTO test (x) VALUES", "type": "default", @@ -631,7 +661,7 @@ def test_clickhouse_dbapi_breadcrumbs_with_pii(sentry_init, capture_events) -> N "db.user": "default", "server.address": 
"localhost", "server.port": 9000, - "db.params": [[170], [200]], + "db.params": PARAMS_SERIALIZER([[170], [200]]), }, "message": "INSERT INTO test (x) VALUES", "type": "default", @@ -644,8 +674,8 @@ def test_clickhouse_dbapi_breadcrumbs_with_pii(sentry_init, capture_events) -> N "db.user": "default", "server.address": "localhost", "server.port": 9000, - "db.params": {"minv": 150}, - "db.result": [[["370"]], [["'sum(x)'", "'Int64'"]]], + "db.params": PARAMS_SERIALIZER({"minv": 150}), + "db.result": str([[["370"]], [["'sum(x)'", "'Int64'"]]]), }, "message": "SELECT sum(x) FROM test WHERE x > 150", "type": "default", @@ -698,13 +728,15 @@ def test_clickhouse_dbapi_spans(sentry_init, capture_events, capture_envelopes) "origin": "auto.db.clickhouse_driver", "description": "DROP TABLE IF EXISTS test", "data": { + "sentry.name": "DROP TABLE IF EXISTS test", + "sentry.origin": "auto.db.clickhouse_driver", + "sentry.op": "db", "db.system": "clickhouse", "db.name": "", "db.user": "default", "server.address": "localhost", "server.port": 9000, }, - "same_process_as_parent": True, "trace_id": transaction_trace_id, "parent_span_id": transaction_span_id, }, @@ -713,13 +745,15 @@ def test_clickhouse_dbapi_spans(sentry_init, capture_events, capture_envelopes) "origin": "auto.db.clickhouse_driver", "description": "CREATE TABLE test (x Int32) ENGINE = Memory", "data": { + "sentry.name": "CREATE TABLE test (x Int32) ENGINE = Memory", + "sentry.origin": "auto.db.clickhouse_driver", + "sentry.op": "db", "db.system": "clickhouse", "db.name": "", "db.user": "default", "server.address": "localhost", "server.port": 9000, }, - "same_process_as_parent": True, "trace_id": transaction_trace_id, "parent_span_id": transaction_span_id, }, @@ -728,13 +762,15 @@ def test_clickhouse_dbapi_spans(sentry_init, capture_events, capture_envelopes) "origin": "auto.db.clickhouse_driver", "description": "INSERT INTO test (x) VALUES", "data": { + "sentry.name": "INSERT INTO test (x) VALUES", + "sentry.origin": "auto.db.clickhouse_driver", + "sentry.op": "db", "db.system": "clickhouse", "db.name": "", "db.user": "default", "server.address": "localhost", "server.port": 9000, }, - "same_process_as_parent": True, "trace_id": transaction_trace_id, "parent_span_id": transaction_span_id, }, @@ -743,13 +779,15 @@ def test_clickhouse_dbapi_spans(sentry_init, capture_events, capture_envelopes) "origin": "auto.db.clickhouse_driver", "description": "INSERT INTO test (x) VALUES", "data": { + "sentry.name": "INSERT INTO test (x) VALUES", + "sentry.origin": "auto.db.clickhouse_driver", + "sentry.op": "db", "db.system": "clickhouse", "db.name": "", "db.user": "default", "server.address": "localhost", "server.port": 9000, }, - "same_process_as_parent": True, "trace_id": transaction_trace_id, "parent_span_id": transaction_span_id, }, @@ -758,13 +796,15 @@ def test_clickhouse_dbapi_spans(sentry_init, capture_events, capture_envelopes) "origin": "auto.db.clickhouse_driver", "description": "SELECT sum(x) FROM test WHERE x > 150", "data": { + "sentry.name": "SELECT sum(x) FROM test WHERE x > 150", + "sentry.origin": "auto.db.clickhouse_driver", + "sentry.op": "db", "db.system": "clickhouse", "db.name": "", "db.user": "default", "server.address": "localhost", "server.port": 9000, }, - "same_process_as_parent": True, "trace_id": transaction_trace_id, "parent_span_id": transaction_span_id, }, @@ -780,6 +820,7 @@ def test_clickhouse_dbapi_spans(sentry_init, capture_events, capture_envelopes) span.pop("span_id", None) span.pop("start_timestamp", None) 
span.pop("timestamp", None) + span.pop("status") assert event["spans"] == expected_spans @@ -821,14 +862,17 @@ def test_clickhouse_dbapi_spans_with_pii( "origin": "auto.db.clickhouse_driver", "description": "DROP TABLE IF EXISTS test", "data": { + "sentry.name": "DROP TABLE IF EXISTS test", + "sentry.origin": "auto.db.clickhouse_driver", + "sentry.op": "db", "db.system": "clickhouse", "db.name": "", "db.user": "default", + "db.query.text": "DROP TABLE IF EXISTS test", + "db.result": str(([], [])), "server.address": "localhost", "server.port": 9000, - "db.result": [[], []], }, - "same_process_as_parent": True, "trace_id": transaction_trace_id, "parent_span_id": transaction_span_id, }, @@ -837,14 +881,17 @@ def test_clickhouse_dbapi_spans_with_pii( "origin": "auto.db.clickhouse_driver", "description": "CREATE TABLE test (x Int32) ENGINE = Memory", "data": { + "sentry.name": "CREATE TABLE test (x Int32) ENGINE = Memory", + "sentry.origin": "auto.db.clickhouse_driver", + "sentry.op": "db", "db.system": "clickhouse", "db.name": "", "db.user": "default", + "db.query.text": "CREATE TABLE test (x Int32) ENGINE = Memory", + "db.result": str(([], [])), "server.address": "localhost", "server.port": 9000, - "db.result": [[], []], }, - "same_process_as_parent": True, "trace_id": transaction_trace_id, "parent_span_id": transaction_span_id, }, @@ -853,14 +900,17 @@ def test_clickhouse_dbapi_spans_with_pii( "origin": "auto.db.clickhouse_driver", "description": "INSERT INTO test (x) VALUES", "data": { + "sentry.name": "INSERT INTO test (x) VALUES", + "sentry.origin": "auto.db.clickhouse_driver", + "sentry.op": "db", "db.system": "clickhouse", "db.name": "", "db.user": "default", + "db.query.text": "INSERT INTO test (x) VALUES", + "db.params": PARAMS_SERIALIZER([{"x": 100}]), "server.address": "localhost", "server.port": 9000, - "db.params": [{"x": 100}], }, - "same_process_as_parent": True, "trace_id": transaction_trace_id, "parent_span_id": transaction_span_id, }, @@ -869,14 +919,17 @@ def test_clickhouse_dbapi_spans_with_pii( "origin": "auto.db.clickhouse_driver", "description": "INSERT INTO test (x) VALUES", "data": { + "sentry.name": "INSERT INTO test (x) VALUES", + "sentry.origin": "auto.db.clickhouse_driver", + "sentry.op": "db", "db.system": "clickhouse", "db.name": "", "db.user": "default", + "db.query.text": "INSERT INTO test (x) VALUES", + "db.params": PARAMS_SERIALIZER([[170], [200]]), "server.address": "localhost", "server.port": 9000, - "db.params": [[170], [200]], }, - "same_process_as_parent": True, "trace_id": transaction_trace_id, "parent_span_id": transaction_span_id, }, @@ -885,15 +938,18 @@ def test_clickhouse_dbapi_spans_with_pii( "origin": "auto.db.clickhouse_driver", "description": "SELECT sum(x) FROM test WHERE x > 150", "data": { + "sentry.name": "SELECT sum(x) FROM test WHERE x > 150", + "sentry.origin": "auto.db.clickhouse_driver", + "sentry.op": "db", "db.system": "clickhouse", "db.name": "", "db.user": "default", + "db.query.text": "SELECT sum(x) FROM test WHERE x > 150", + "db.params": PARAMS_SERIALIZER({"minv": 150}), + "db.result": str(([(370,)], [("sum(x)", "Int64")])), "server.address": "localhost", "server.port": 9000, - "db.params": {"minv": 150}, - "db.result": [[[370]], [["sum(x)", "Int64"]]], }, - "same_process_as_parent": True, "trace_id": transaction_trace_id, "parent_span_id": transaction_span_id, }, @@ -909,6 +965,8 @@ def test_clickhouse_dbapi_spans_with_pii( span.pop("span_id", None) span.pop("start_timestamp", None) span.pop("timestamp", None) + 
span.pop("same_process_as_parent", None) + span.pop("status", None) assert event["spans"] == expected_spans From 1c7274717d4eabf13054678a890934d8b0b7cfb0 Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Fri, 11 Oct 2024 14:54:30 +0200 Subject: [PATCH 060/244] Only `add` to trace state if key does not exist (#3645) `trace_state.add` will never overwrite existing entries -- this is good. However, everytime it bails, it logs a warning, which then shows up in breadcrumbs in tests. With this commit we explicitly check before adding. --- .../integrations/opentelemetry/sampler.py | 23 +++++++++++-------- 1 file changed, 14 insertions(+), 9 deletions(-) diff --git a/sentry_sdk/integrations/opentelemetry/sampler.py b/sentry_sdk/integrations/opentelemetry/sampler.py index 404957f028..8ffad41b86 100644 --- a/sentry_sdk/integrations/opentelemetry/sampler.py +++ b/sentry_sdk/integrations/opentelemetry/sampler.py @@ -48,10 +48,13 @@ def get_parent_sampled(parent_context, trace_id): def dropped_result(span_context, attributes, sample_rate=None): # type: (SpanContext, Attributes, Optional[float]) -> SamplingResult - # note that trace_state.add will NOT overwrite existing entries - # so these will only be added the first time in a root span sampling decision - trace_state = span_context.trace_state.add(TRACESTATE_SAMPLED_KEY, "false") - if sample_rate: + # these will only be added the first time in a root span sampling decision + trace_state = span_context.trace_state + + if TRACESTATE_SAMPLED_KEY not in trace_state: + trace_state = trace_state.add(TRACESTATE_SAMPLED_KEY, "false") + + if sample_rate and TRACESTATE_SAMPLE_RATE_KEY not in trace_state: trace_state = trace_state.add(TRACESTATE_SAMPLE_RATE_KEY, str(sample_rate)) return SamplingResult( @@ -63,11 +66,13 @@ def dropped_result(span_context, attributes, sample_rate=None): def sampled_result(span_context, attributes, sample_rate): # type: (SpanContext, Attributes, float) -> SamplingResult - # note that trace_state.add will NOT overwrite existing entries - # so these will only be added the first time in a root span sampling decision - trace_state = span_context.trace_state.add(TRACESTATE_SAMPLED_KEY, "true").add( - TRACESTATE_SAMPLE_RATE_KEY, str(sample_rate) - ) + # these will only be added the first time in a root span sampling decision + trace_state = span_context.trace_state + + if TRACESTATE_SAMPLED_KEY not in trace_state: + trace_state = trace_state.add(TRACESTATE_SAMPLED_KEY, "true") + if TRACESTATE_SAMPLE_RATE_KEY not in trace_state: + trace_state = trace_state.add(TRACESTATE_SAMPLE_RATE_KEY, str(sample_rate)) return SamplingResult( Decision.RECORD_AND_SAMPLE, From e205f2daefcaa625e232322442419e767d557ddd Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Fri, 11 Oct 2024 16:37:00 +0200 Subject: [PATCH 061/244] Make transaction names work (#3643) * while sending the event in the final payload, the transaction name will be picked up from the attributes `sentry.name` or fallback to the `description` when we only have otel instrumentation * for populating DSC in the head case, we are doing a best attempt solution and fetching the transaction name/source from the current and isolation scopes * NOTE that there are cases where this will be inaccurate if we never set the transaction name on the scope in some integration, so we will go through and fix those cases separately. 
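As a rough illustration of the fallback order described above, the end result behaves like the sketch below (a minimal, self-contained sketch: `pick_transaction_name` is a made-up helper, not SDK code; the `sentry.name` / `sentry.source` attribute keys are the ones this patch works with):

    SENTRY_NAME = "sentry.name"      # set when a name is assigned via the Sentry API
    SENTRY_SOURCE = "sentry.source"  # set when a transaction source is assigned

    def pick_transaction_name(attributes, description):
        # type: (dict, str) -> tuple
        """Prefer the explicit sentry.name/sentry.source attributes and fall
        back to the OTel span description with a "custom" source."""
        attributes = attributes or {}
        name = attributes.get(SENTRY_NAME) or description
        source = attributes.get(SENTRY_SOURCE) or "custom"
        return name, source

    # OTel-only instrumentation, no Sentry attributes on the span:
    assert pick_transaction_name({}, "GET /users") == ("GET /users", "custom")
    # span named through the Sentry API (e.g. the Flask integration):
    assert pick_transaction_name(
        {SENTRY_NAME: "hi_tx", SENTRY_SOURCE: "component"}, "hi_tx"
    ) == ("hi_tx", "component")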
--- .../integrations/opentelemetry/consts.py | 1 + .../opentelemetry/potel_span_processor.py | 7 ++-- .../integrations/opentelemetry/utils.py | 36 +++++++++++++++++-- sentry_sdk/scope.py | 10 ++++++ sentry_sdk/tracing.py | 18 ++++++---- tests/integrations/flask/test_flask.py | 2 ++ 6 files changed, 63 insertions(+), 11 deletions(-) diff --git a/sentry_sdk/integrations/opentelemetry/consts.py b/sentry_sdk/integrations/opentelemetry/consts.py index a71e304cf5..6d7c91f3f1 100644 --- a/sentry_sdk/integrations/opentelemetry/consts.py +++ b/sentry_sdk/integrations/opentelemetry/consts.py @@ -28,4 +28,5 @@ class SentrySpanAttribute: MEASUREMENT = "sentry.measurement" TAG = "sentry.tag" NAME = "sentry.name" + SOURCE = "sentry.source" CONTEXT = "sentry.context" diff --git a/sentry_sdk/integrations/opentelemetry/potel_span_processor.py b/sentry_sdk/integrations/opentelemetry/potel_span_processor.py index 0076743245..ed31608516 100644 --- a/sentry_sdk/integrations/opentelemetry/potel_span_processor.py +++ b/sentry_sdk/integrations/opentelemetry/potel_span_processor.py @@ -18,6 +18,7 @@ convert_from_otel_timestamp, extract_span_attributes, extract_span_data, + extract_transaction_name_source, get_trace_context, ) from sentry_sdk.integrations.opentelemetry.consts import ( @@ -137,6 +138,7 @@ def _root_span_to_transaction_event(self, span): if event is None: return None + transaction_name, transaction_source = extract_transaction_name_source(span) span_data = extract_span_data(span) (_, description, _, http_status, _) = span_data @@ -152,9 +154,8 @@ def _root_span_to_transaction_event(self, span): event.update( { "type": "transaction", - "transaction": description, - # TODO-neel-potel tx source based on integration - "transaction_info": {"source": "custom"}, + "transaction": transaction_name or description, + "transaction_info": {"source": transaction_source or "custom"}, "contexts": contexts, } ) diff --git a/sentry_sdk/integrations/opentelemetry/utils.py b/sentry_sdk/integrations/opentelemetry/utils.py index ddc9f0c25a..dc0fff7f8c 100644 --- a/sentry_sdk/integrations/opentelemetry/utils.py +++ b/sentry_sdk/integrations/opentelemetry/utils.py @@ -19,7 +19,7 @@ from sentry_sdk.utils import Dsn from sentry_sdk.consts import SPANSTATUS, OP from sentry_sdk.tracing import get_span_status_from_http_code, DEFAULT_SPAN_ORIGIN -from sentry_sdk.tracing_utils import Baggage +from sentry_sdk.tracing_utils import Baggage, LOW_QUALITY_TRANSACTION_SOURCES from sentry_sdk.integrations.opentelemetry.consts import SentrySpanAttribute from sentry_sdk._types import TYPE_CHECKING @@ -98,6 +98,16 @@ def convert_to_otel_timestamp(time): return int(time * 1e9) +def extract_transaction_name_source(span): + # type: (ReadableSpan) -> tuple[Optional[str], Optional[str]] + if not span.attributes: + return (None, None) + return ( + cast("Optional[str]", span.attributes.get(SentrySpanAttribute.NAME)), + cast("Optional[str]", span.attributes.get(SentrySpanAttribute.SOURCE)), + ) + + def extract_span_data(span): # type: (ReadableSpan) -> OtelExtractedSpanData op = span.name @@ -394,5 +404,27 @@ def get_trace_state(span): Baggage.SENTRY_PREFIX + "public_key", Dsn(options["dsn"]).public_key ) - # TODO-neel-potel head dsc transaction name + # we cannot access the root span in most cases here, so we HAVE to rely on the + # scopes to carry the correct transaction name/source. 
+ # IDEALLY we will always move to using the isolation scope here + # but our integrations do all kinds of stuff with both isolation and current + # so I am keeping both for now as a best attempt solution till we get to a better state. + isolation_scope = sentry_sdk.get_isolation_scope() + current_scope = sentry_sdk.get_current_scope() + if ( + current_scope.transaction_name + and current_scope.transaction_source not in LOW_QUALITY_TRANSACTION_SOURCES + ): + trace_state = trace_state.update( + Baggage.SENTRY_PREFIX + "transaction", current_scope.transaction_name + ) + elif ( + isolation_scope.transaction_name + and isolation_scope.transaction_source + not in LOW_QUALITY_TRANSACTION_SOURCES + ): + trace_state = trace_state.update( + Baggage.SENTRY_PREFIX + "transaction", isolation_scope.transaction_name + ) + return trace_state diff --git a/sentry_sdk/scope.py b/sentry_sdk/scope.py index fcc870de95..09a7f9ee89 100644 --- a/sentry_sdk/scope.py +++ b/sentry_sdk/scope.py @@ -741,6 +741,16 @@ def set_transaction_name(self, name, source=None): if source: self._transaction_info["source"] = source + @property + def transaction_name(self): + # type: () -> Optional[str] + return self._transaction + + @property + def transaction_source(self): + # type: () -> Optional[str] + return self._transaction_info.get("source") + @_attr_setter def user(self, value): # type: (Optional[Dict[str, Any]]) -> None diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 5b0795af62..d4033b52ef 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -1292,7 +1292,7 @@ def description(self, value): from sentry_sdk.integrations.opentelemetry.consts import SentrySpanAttribute if value is not None: - self._otel_span.set_attribute(SentrySpanAttribute.DESCRIPTION, value) + self.set_attribute(SentrySpanAttribute.DESCRIPTION, value) @property def origin(self): @@ -1307,7 +1307,7 @@ def origin(self, value): from sentry_sdk.integrations.opentelemetry.consts import SentrySpanAttribute if value is not None: - self._otel_span.set_attribute(SentrySpanAttribute.ORIGIN, value) + self.set_attribute(SentrySpanAttribute.ORIGIN, value) @property def containing_transaction(self): @@ -1380,7 +1380,7 @@ def op(self, value): from sentry_sdk.integrations.opentelemetry.consts import SentrySpanAttribute if value is not None: - self._otel_span.set_attribute(SentrySpanAttribute.OP, value) + self.set_attribute(SentrySpanAttribute.OP, value) @property def name(self): @@ -1395,17 +1395,23 @@ def name(self, value): from sentry_sdk.integrations.opentelemetry.consts import SentrySpanAttribute if value is not None: - self._otel_span.set_attribute(SentrySpanAttribute.NAME, value) + self.set_attribute(SentrySpanAttribute.NAME, value) @property def source(self): # type: () -> str - pass + from sentry_sdk.integrations.opentelemetry.consts import SentrySpanAttribute + + return ( + self.get_attribute(SentrySpanAttribute.SOURCE) or TRANSACTION_SOURCE_CUSTOM + ) @source.setter def source(self, value): # type: (str) -> None - pass + from sentry_sdk.integrations.opentelemetry.consts import SentrySpanAttribute + + self.set_attribute(SentrySpanAttribute.SOURCE, value) @property def start_timestamp(self): diff --git a/tests/integrations/flask/test_flask.py b/tests/integrations/flask/test_flask.py index 6febb12b8b..4e92df7e7c 100644 --- a/tests/integrations/flask/test_flask.py +++ b/tests/integrations/flask/test_flask.py @@ -751,12 +751,14 @@ def hi_tx(): assert transaction_event["type"] == "transaction" assert transaction_event["transaction"] == 
"hi_tx" + assert transaction_event["transaction_info"] == {"source": "component"} assert transaction_event["contexts"]["trace"]["status"] == "ok" assert transaction_event["tags"]["view"] == "yes" assert transaction_event["tags"]["before_request"] == "yes" assert message_event["message"] == "hi" assert message_event["transaction"] == "hi_tx" + assert message_event["transaction_info"] == {"source": "component"} assert message_event["tags"]["view"] == "yes" assert message_event["tags"]["before_request"] == "yes" From f9187be55727e2a518abd63450f2bce62e8a703f Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Mon, 14 Oct 2024 12:43:27 +0200 Subject: [PATCH 062/244] Fix tests using `_span_recorder` (#3633) Instead of getting span from ._span_recorder get it from envelope. --- tests/integrations/aiohttp/test_aiohttp.py | 27 +++++--- tests/integrations/httpx/test_httpx.py | 61 +++++++++++------- tests/integrations/stdlib/test_httplib.py | 73 +++++++++++++--------- 3 files changed, 97 insertions(+), 64 deletions(-) diff --git a/tests/integrations/aiohttp/test_aiohttp.py b/tests/integrations/aiohttp/test_aiohttp.py index cd65e7cdd5..bafb639c34 100644 --- a/tests/integrations/aiohttp/test_aiohttp.py +++ b/tests/integrations/aiohttp/test_aiohttp.py @@ -517,12 +517,16 @@ async def handler(request): @pytest.mark.asyncio -async def test_outgoing_trace_headers(sentry_init, aiohttp_raw_server, aiohttp_client): +async def test_outgoing_trace_headers( + sentry_init, aiohttp_raw_server, aiohttp_client, capture_envelopes +): sentry_init( integrations=[AioHttpIntegration()], traces_sample_rate=1.0, ) + envelopes = capture_envelopes() + async def handler(request): return web.Response(text="OK") @@ -536,15 +540,18 @@ async def handler(request): ) as transaction: client = await aiohttp_client(raw_server) resp = await client.get("/") - request_span = transaction._span_recorder.spans[-1] - - assert resp.request_info.headers[ - "sentry-trace" - ] == "{trace_id}-{parent_span_id}-{sampled}".format( - trace_id=transaction.trace_id, - parent_span_id=request_span.span_id, - sampled=1, - ) + + (envelope,) = envelopes + transaction = envelope.get_transaction_event() + request_span = transaction["spans"][-1] + + assert resp.request_info.headers[ + "sentry-trace" + ] == "{trace_id}-{parent_span_id}-{sampled}".format( + trace_id=transaction["contexts"]["trace"]["trace_id"], + parent_span_id=request_span["span_id"], + sampled=1, + ) @pytest.mark.asyncio diff --git a/tests/integrations/httpx/test_httpx.py b/tests/integrations/httpx/test_httpx.py index 17bf7017a5..f31a665245 100644 --- a/tests/integrations/httpx/test_httpx.py +++ b/tests/integrations/httpx/test_httpx.py @@ -61,8 +61,13 @@ def before_breadcrumb(crumb, hint): "httpx_client", (httpx.Client(), httpx.AsyncClient()), ) -def test_outgoing_trace_headers(sentry_init, httpx_client): - sentry_init(traces_sample_rate=1.0, integrations=[HttpxIntegration()]) +def test_outgoing_trace_headers(sentry_init, httpx_client, capture_envelopes): + sentry_init( + traces_sample_rate=1.0, + integrations=[HttpxIntegration()], + ) + + envelopes = capture_envelopes() url = "http://example.com/" responses.add(responses.GET, url, status=200) @@ -79,27 +84,34 @@ def test_outgoing_trace_headers(sentry_init, httpx_client): else: response = httpx_client.get(url) - request_span = transaction._span_recorder.spans[-1] - assert response.request.headers[ - "sentry-trace" - ] == "{trace_id}-{parent_span_id}-{sampled}".format( - trace_id=transaction.trace_id, - parent_span_id=request_span.span_id, - 
sampled=1, - ) + (envelope,) = envelopes + transaction = envelope.get_transaction_event() + request_span = transaction["spans"][-1] + + assert response.request.headers[ + "sentry-trace" + ] == "{trace_id}-{parent_span_id}-{sampled}".format( + trace_id=transaction["contexts"]["trace"]["trace_id"], + parent_span_id=request_span["span_id"], + sampled=1, + ) @pytest.mark.parametrize( "httpx_client", (httpx.Client(), httpx.AsyncClient()), ) -def test_outgoing_trace_headers_append_to_baggage(sentry_init, httpx_client): +def test_outgoing_trace_headers_append_to_baggage( + sentry_init, httpx_client, capture_envelopes +): sentry_init( traces_sample_rate=1.0, integrations=[HttpxIntegration()], release="d08ebdb9309e1b004c6f52202de58a09c2268e42", ) + envelopes = capture_envelopes() + url = "http://example.com/" responses.add(responses.GET, url, status=200) @@ -115,18 +127,21 @@ def test_outgoing_trace_headers_append_to_baggage(sentry_init, httpx_client): else: response = httpx_client.get(url, headers={"baGGage": "custom=data"}) - request_span = transaction._span_recorder.spans[-1] - assert response.request.headers[ - "sentry-trace" - ] == "{trace_id}-{parent_span_id}-{sampled}".format( - trace_id=transaction.trace_id, - parent_span_id=request_span.span_id, - sampled=1, - ) - assert ( - response.request.headers["baggage"] - == "custom=data,sentry-trace_id=01234567890123456789012345678901,sentry-environment=production,sentry-release=d08ebdb9309e1b004c6f52202de58a09c2268e42,sentry-transaction=/interactions/other-dogs/new-dog,sentry-sample_rate=1.0,sentry-sampled=true" - ) + (envelope,) = envelopes + transaction = envelope.get_transaction_event() + request_span = transaction["spans"][-1] + + assert response.request.headers[ + "sentry-trace" + ] == "{trace_id}-{parent_span_id}-{sampled}".format( + trace_id=transaction["contexts"]["trace"]["trace_id"], + parent_span_id=request_span["span_id"], + sampled=1, + ) + assert ( + response.request.headers["baggage"] + == "custom=data,sentry-trace_id=01234567890123456789012345678901,sentry-environment=production,sentry-release=d08ebdb9309e1b004c6f52202de58a09c2268e42,sentry-transaction=/interactions/other-dogs/new-dog,sentry-sample_rate=1.0,sentry-sampled=true" + ) @pytest.mark.parametrize( diff --git a/tests/integrations/stdlib/test_httplib.py b/tests/integrations/stdlib/test_httplib.py index c327331608..3d4aa988f6 100644 --- a/tests/integrations/stdlib/test_httplib.py +++ b/tests/integrations/stdlib/test_httplib.py @@ -130,7 +130,7 @@ def test_httplib_misuse(sentry_init, capture_events, request): ) -def test_outgoing_trace_headers(sentry_init, monkeypatch): +def test_outgoing_trace_headers(sentry_init, monkeypatch, capture_envelopes): # HTTPSConnection.send is passed a string containing (among other things) # the headers on the request. Mock it so we can check the headers, and also # so it doesn't try to actually talk to the internet. 
@@ -139,6 +139,8 @@ def test_outgoing_trace_headers(sentry_init, monkeypatch): sentry_init(traces_sample_rate=1.0) + envelopes = capture_envelopes() + headers = {} headers["baggage"] = ( "other-vendor-value-1=foo;bar;baz, sentry-trace_id=771a43a4192642f0b136d5159a501700, " @@ -163,25 +165,28 @@ def test_outgoing_trace_headers(sentry_init, monkeypatch): key, val = line.split(": ") request_headers[key] = val - request_span = transaction._span_recorder.spans[-1] - expected_sentry_trace = "{trace_id}-{parent_span_id}-{sampled}".format( - trace_id=transaction.trace_id, - parent_span_id=request_span.span_id, - sampled=1, - ) - assert request_headers["sentry-trace"] == expected_sentry_trace + (envelope,) = envelopes + transaction = envelope.get_transaction_event() + request_span = transaction["spans"][-1] - expected_outgoing_baggage = ( - "sentry-trace_id=771a43a4192642f0b136d5159a501700," - "sentry-public_key=49d0f7386ad645858ae85020e393bef3," - "sentry-sample_rate=0.01337," - "sentry-user_id=Am%C3%A9lie" - ) + expected_sentry_trace = "{trace_id}-{parent_span_id}-{sampled}".format( + trace_id=transaction["contexts"]["trace"]["trace_id"], + parent_span_id=request_span["span_id"], + sampled=1, + ) + assert request_headers["sentry-trace"] == expected_sentry_trace + + expected_outgoing_baggage = ( + "sentry-trace_id=771a43a4192642f0b136d5159a501700," + "sentry-public_key=49d0f7386ad645858ae85020e393bef3," + "sentry-sample_rate=0.01337," + "sentry-user_id=Am%C3%A9lie" + ) - assert request_headers["baggage"] == expected_outgoing_baggage + assert request_headers["baggage"] == expected_outgoing_baggage -def test_outgoing_trace_headers_head_sdk(sentry_init, monkeypatch): +def test_outgoing_trace_headers_head_sdk(sentry_init, monkeypatch, capture_envelopes): # HTTPSConnection.send is passed a string containing (among other things) # the headers on the request. Mock it so we can check the headers, and also # so it doesn't try to actually talk to the internet. 
@@ -192,6 +197,9 @@ def test_outgoing_trace_headers_head_sdk(sentry_init, monkeypatch): monkeypatch.setattr(random, "random", lambda: 0.1) sentry_init(traces_sample_rate=0.5, release="foo") + + envelopes = capture_envelopes() + transaction = Transaction.continue_from_headers({}) with start_transaction(transaction=transaction, name="Head SDK tx") as transaction: @@ -204,23 +212,26 @@ def test_outgoing_trace_headers_head_sdk(sentry_init, monkeypatch): key, val = line.split(": ") request_headers[key] = val - request_span = transaction._span_recorder.spans[-1] - expected_sentry_trace = "{trace_id}-{parent_span_id}-{sampled}".format( - trace_id=transaction.trace_id, - parent_span_id=request_span.span_id, - sampled=1, - ) - assert request_headers["sentry-trace"] == expected_sentry_trace + (envelope,) = envelopes + transaction = envelope.get_transaction_event() + request_span = transaction["spans"][-1] + + expected_sentry_trace = "{trace_id}-{parent_span_id}-{sampled}".format( + trace_id=transaction["contexts"]["trace"]["trace_id"], + parent_span_id=request_span["span_id"], + sampled=1, + ) + assert request_headers["sentry-trace"] == expected_sentry_trace - expected_outgoing_baggage = ( - "sentry-trace_id=%s," - "sentry-environment=production," - "sentry-release=foo," - "sentry-sample_rate=0.5," - "sentry-sampled=%s" - ) % (transaction.trace_id, "true" if transaction.sampled else "false") + expected_outgoing_baggage = ( + "sentry-trace_id=%s," + "sentry-environment=production," + "sentry-release=foo," + "sentry-sample_rate=0.5," + "sentry-sampled=%s" + ) % (transaction.trace_id, "true" if transaction.sampled else "false") - assert request_headers["baggage"] == expected_outgoing_baggage + assert request_headers["baggage"] == expected_outgoing_baggage @pytest.mark.parametrize( From ef7c39cfcf9f55cc22d56a91455eec2fcc545434 Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Tue, 15 Oct 2024 16:45:24 +0200 Subject: [PATCH 063/244] Fix asyncpg spans (#3639) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Change `span.set_data` to `span.set_attribute` - Change `start_transaction`s in the tests to `start_span`s - Stringify `db.params`, `db.paramstyle`, `db.cursor` - ❗ Change how adding query source works. Previously, we first let the DB span finish and then looked at its start and end timestamps to figure out if the query was slow enough to attach query data. With the switch to OTel, we can no longer put stuff on a finished span, meaning the slow query span still has to be alive when adding data to it. So instead of looking at the end timestamp of a finished span, we keep the span alive and instead look at current time at the point when the query is finished. This means the DB span will be longer than the actual query (since it now includes adding query data). 
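To make the new timing behaviour concrete, the slow-query check now boils down to something like the sketch below (simplified; `query_is_slow` is a hypothetical helper, the real logic lives in `add_query_source`, which reads the threshold from `client.options["db_query_source_threshold_ms"]`):

    from datetime import datetime, timedelta, timezone

    def query_is_slow(span_start, threshold_ms):
        # type: (datetime, float) -> bool
        # The DB span is still open at this point, so use "now" as the
        # approximate end of the query instead of a final span timestamp.
        elapsed = datetime.now(tz=timezone.utc) - span_start
        return elapsed / timedelta(milliseconds=1) > threshold_ms

    # Usage right after the query returns, while the span is still alive
    # (100 ms is the boundary the updated tests in this patch exercise):
    #
    #     if query_is_slow(span.start_timestamp, threshold_ms=100):
    #         add_query_source(span)

Note that `span_start` has to be timezone-aware for the subtraction to work, which is why the comparison is done against `datetime.now(tz=timezone.utc)`.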
--- sentry_sdk/integrations/asyncpg.py | 45 ++++--- sentry_sdk/tracing_utils.py | 15 ++- tests/integrations/asyncpg/test_asyncpg.py | 145 ++++++++++++--------- 3 files changed, 123 insertions(+), 82 deletions(-) diff --git a/sentry_sdk/integrations/asyncpg.py b/sentry_sdk/integrations/asyncpg.py index b05d5615ba..71740cb3aa 100644 --- a/sentry_sdk/integrations/asyncpg.py +++ b/sentry_sdk/integrations/asyncpg.py @@ -40,7 +40,6 @@ def setup_once() -> None: asyncpg.Connection.execute = _wrap_execute( asyncpg.Connection.execute, ) - asyncpg.Connection._execute = _wrap_connection_method( asyncpg.Connection._execute ) @@ -80,8 +79,8 @@ async def _inner(*args: Any, **kwargs: Any) -> T: ) as span: res = await f(*args, **kwargs) - with capture_internal_exceptions(): - add_query_source(span) + with capture_internal_exceptions(): + add_query_source(span) return res @@ -148,7 +147,7 @@ def _inner(*args: Any, **kwargs: Any) -> T: # noqa: N807 ) as span: _set_db_data(span, args[0]) res = f(*args, **kwargs) - span.set_data("db.cursor", res) + span.set_attribute("db.cursor", str(res)) return res @@ -168,21 +167,37 @@ async def _inner(*args: Any, **kwargs: Any) -> T: name="connect", origin=AsyncPGIntegration.origin, ) as span: - span.set_data(SPANDATA.DB_SYSTEM, "postgresql") + span.set_attribute(SPANDATA.DB_SYSTEM, "postgresql") addr = kwargs.get("addr") if addr: try: - span.set_data(SPANDATA.SERVER_ADDRESS, addr[0]) - span.set_data(SPANDATA.SERVER_PORT, addr[1]) + span.set_attribute(SPANDATA.SERVER_ADDRESS, addr[0]) + span.set_attribute(SPANDATA.SERVER_PORT, addr[1]) except IndexError: pass - span.set_data(SPANDATA.DB_NAME, database) - span.set_data(SPANDATA.DB_USER, user) + + span.set_attribute(SPANDATA.DB_NAME, database) + span.set_attribute(SPANDATA.DB_USER, user) with capture_internal_exceptions(): + data = {} + for attr in ( + "db.cursor", + "db.params", + "db.paramstyle", + SPANDATA.DB_NAME, + SPANDATA.DB_SYSTEM, + SPANDATA.DB_USER, + SPANDATA.SERVER_ADDRESS, + SPANDATA.SERVER_PORT, + ): + if span.get_attribute(attr): + data[attr] = span.get_attribute(attr) + sentry_sdk.add_breadcrumb( - message="connect", category="query", data=span._data + message="connect", category="query", data=data ) + res = await f(*args, **kwargs) return res @@ -191,20 +206,20 @@ async def _inner(*args: Any, **kwargs: Any) -> T: def _set_db_data(span: Span, conn: Any) -> None: - span.set_data(SPANDATA.DB_SYSTEM, "postgresql") + span.set_attribute(SPANDATA.DB_SYSTEM, "postgresql") addr = conn._addr if addr: try: - span.set_data(SPANDATA.SERVER_ADDRESS, addr[0]) - span.set_data(SPANDATA.SERVER_PORT, addr[1]) + span.set_attribute(SPANDATA.SERVER_ADDRESS, addr[0]) + span.set_attribute(SPANDATA.SERVER_PORT, addr[1]) except IndexError: pass database = conn._params.database if database: - span.set_data(SPANDATA.DB_NAME, database) + span.set_attribute(SPANDATA.DB_NAME, database) user = conn._params.user if user: - span.set_data(SPANDATA.DB_USER, user) + span.set_attribute(SPANDATA.DB_USER, user) diff --git a/sentry_sdk/tracing_utils.py b/sentry_sdk/tracing_utils.py index 2f4dad738a..b8f7288374 100644 --- a/sentry_sdk/tracing_utils.py +++ b/sentry_sdk/tracing_utils.py @@ -4,7 +4,7 @@ import re import sys from collections.abc import Mapping -from datetime import timedelta +from datetime import datetime, timedelta, timezone from functools import wraps from urllib.parse import quote, unquote import uuid @@ -133,13 +133,13 @@ def record_sql_queries( data = {} if params_list is not None: - data["db.params"] = params_list + 
data["db.params"] = str(params_list) if paramstyle is not None: - data["db.paramstyle"] = paramstyle + data["db.paramstyle"] = str(paramstyle) if executemany: data["db.executemany"] = True if record_cursor_repr and cursor is not None: - data["db.cursor"] = cursor + data["db.cursor"] = str(cursor) with capture_internal_exceptions(): sentry_sdk.add_breadcrumb(message=query, category="query", data=data) @@ -209,14 +209,17 @@ def add_query_source(span): if not client.is_active(): return - if span.timestamp is None or span.start_timestamp is None: + if span.start_timestamp is None: return should_add_query_source = client.options.get("enable_db_query_source", True) if not should_add_query_source: return - duration = span.timestamp - span.start_timestamp + # We assume here that the query is just ending now. We can't use + # the actual end timestamp of the span because in OTel the span + # can't be finished in order to set any attributes on it. + duration = datetime.now(tz=timezone.utc) - span.start_timestamp threshold = client.options.get("db_query_source_threshold_ms", 0) slow_query = duration / timedelta(milliseconds=1) > threshold diff --git a/tests/integrations/asyncpg/test_asyncpg.py b/tests/integrations/asyncpg/test_asyncpg.py index 8996c8dd1a..adeef37d38 100644 --- a/tests/integrations/asyncpg/test_asyncpg.py +++ b/tests/integrations/asyncpg/test_asyncpg.py @@ -10,14 +10,6 @@ """ import os - - -PG_HOST = os.getenv("SENTRY_PYTHON_TEST_POSTGRES_HOST", "localhost") -PG_PORT = int(os.getenv("SENTRY_PYTHON_TEST_POSTGRES_PORT", "5432")) -PG_USER = os.getenv("SENTRY_PYTHON_TEST_POSTGRES_USER", "postgres") -PG_PASSWORD = os.getenv("SENTRY_PYTHON_TEST_POSTGRES_PASSWORD", "sentry") -PG_NAME = os.getenv("SENTRY_PYTHON_TEST_POSTGRES_NAME", "postgres") - import datetime from contextlib import contextmanager from unittest import mock @@ -28,16 +20,23 @@ from asyncpg import connect, Connection from freezegun import freeze_time -from sentry_sdk import capture_message, start_transaction +from sentry_sdk import capture_message, start_span from sentry_sdk.integrations.asyncpg import AsyncPGIntegration from sentry_sdk.consts import SPANDATA from sentry_sdk.tracing_utils import record_sql_queries from tests.conftest import ApproxDict +PG_HOST = os.getenv("SENTRY_PYTHON_TEST_POSTGRES_HOST", "localhost") +PG_PORT = int(os.getenv("SENTRY_PYTHON_TEST_POSTGRES_PORT", "5432")) +PG_USER = os.getenv("SENTRY_PYTHON_TEST_POSTGRES_USER", "postgres") +PG_PASSWORD = os.getenv("SENTRY_PYTHON_TEST_POSTGRES_PASSWORD", "sentry") +PG_NAME = os.getenv("SENTRY_PYTHON_TEST_POSTGRES_NAME", "postgres") + PG_CONNECTION_URI = "postgresql://{}:{}@{}/{}".format( PG_USER, PG_PASSWORD, PG_HOST, PG_NAME ) + CRUMBS_CONNECT = { "category": "query", "data": ApproxDict( @@ -75,6 +74,7 @@ async def _clean_pg(): async def test_connect(sentry_init, capture_events) -> None: sentry_init( integrations=[AsyncPGIntegration()], + traces_sample_rate=1.0, _experiments={"record_sql_params": True}, ) events = capture_events() @@ -85,7 +85,7 @@ async def test_connect(sentry_init, capture_events) -> None: capture_message("hi") - (event,) = events + event = events[-1] for crumb in event["breadcrumbs"]["values"]: del crumb["timestamp"] @@ -97,6 +97,7 @@ async def test_connect(sentry_init, capture_events) -> None: async def test_execute(sentry_init, capture_events) -> None: sentry_init( integrations=[AsyncPGIntegration()], + traces_sample_rate=1.0, _experiments={"record_sql_params": True}, ) events = capture_events() @@ -124,7 +125,7 @@ async def 
test_execute(sentry_init, capture_events) -> None: capture_message("hi") - (event,) = events + event = events[-1] for crumb in event["breadcrumbs"]["values"]: del crumb["timestamp"] @@ -162,6 +163,7 @@ async def test_execute(sentry_init, capture_events) -> None: async def test_execute_many(sentry_init, capture_events) -> None: sentry_init( integrations=[AsyncPGIntegration()], + traces_sample_rate=1.0, _experiments={"record_sql_params": True}, ) events = capture_events() @@ -180,7 +182,7 @@ async def test_execute_many(sentry_init, capture_events) -> None: capture_message("hi") - (event,) = events + event = events[-1] for crumb in event["breadcrumbs"]["values"]: del crumb["timestamp"] @@ -200,6 +202,7 @@ async def test_execute_many(sentry_init, capture_events) -> None: async def test_record_params(sentry_init, capture_events) -> None: sentry_init( integrations=[AsyncPGIntegration(record_params=True)], + traces_sample_rate=1.0, _experiments={"record_sql_params": True}, ) events = capture_events() @@ -217,7 +220,7 @@ async def test_record_params(sentry_init, capture_events) -> None: capture_message("hi") - (event,) = events + event = events[-1] for crumb in event["breadcrumbs"]["values"]: del crumb["timestamp"] @@ -227,7 +230,7 @@ async def test_record_params(sentry_init, capture_events) -> None: { "category": "query", "data": { - "db.params": ["Bob", "secret_pw", "datetime.date(1984, 3, 1)"], + "db.params": "('Bob', 'secret_pw', datetime.date(1984, 3, 1))", "db.paramstyle": "format", }, "message": "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)", @@ -240,6 +243,7 @@ async def test_record_params(sentry_init, capture_events) -> None: async def test_cursor(sentry_init, capture_events) -> None: sentry_init( integrations=[AsyncPGIntegration()], + traces_sample_rate=1.0, _experiments={"record_sql_params": True}, ) events = capture_events() @@ -260,13 +264,13 @@ async def test_cursor(sentry_init, capture_events) -> None: async for record in conn.cursor( "SELECT * FROM users WHERE dob > $1", datetime.date(1970, 1, 1) ): - print(record) + pass await conn.close() capture_message("hi") - (event,) = events + event = events[-1] for crumb in event["breadcrumbs"]["values"]: del crumb["timestamp"] @@ -279,14 +283,24 @@ async def test_cursor(sentry_init, capture_events) -> None: "message": "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)", "type": "default", }, - {"category": "query", "data": {}, "message": "BEGIN;", "type": "default"}, + { + "category": "query", + "data": {}, + "message": "BEGIN;", + "type": "default", + }, { "category": "query", "data": {}, "message": "SELECT * FROM users WHERE dob > $1", "type": "default", }, - {"category": "query", "data": {}, "message": "COMMIT;", "type": "default"}, + { + "category": "query", + "data": {}, + "message": "COMMIT;", + "type": "default", + }, ] @@ -294,6 +308,7 @@ async def test_cursor(sentry_init, capture_events) -> None: async def test_cursor_manual(sentry_init, capture_events) -> None: sentry_init( integrations=[AsyncPGIntegration()], + traces_sample_rate=1.0, _experiments={"record_sql_params": True}, ) events = capture_events() @@ -307,24 +322,22 @@ async def test_cursor_manual(sentry_init, capture_events) -> None: ("Alice", "pw", datetime.date(1990, 12, 25)), ], ) - # + async with conn.transaction(): # Postgres requires non-scrollable cursors to be created # and used in a transaction. 
cur = await conn.cursor( "SELECT * FROM users WHERE dob > $1", datetime.date(1970, 1, 1) ) - record = await cur.fetchrow() - print(record) + await cur.fetchrow() while await cur.forward(1): - record = await cur.fetchrow() - print(record) + await cur.fetchrow() await conn.close() capture_message("hi") - (event,) = events + event = events[-1] for crumb in event["breadcrumbs"]["values"]: del crumb["timestamp"] @@ -337,14 +350,24 @@ async def test_cursor_manual(sentry_init, capture_events) -> None: "message": "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)", "type": "default", }, - {"category": "query", "data": {}, "message": "BEGIN;", "type": "default"}, + { + "category": "query", + "data": {}, + "message": "BEGIN;", + "type": "default", + }, { "category": "query", "data": {}, "message": "SELECT * FROM users WHERE dob > $1", "type": "default", }, - {"category": "query", "data": {}, "message": "COMMIT;", "type": "default"}, + { + "category": "query", + "data": {}, + "message": "COMMIT;", + "type": "default", + }, ] @@ -352,6 +375,7 @@ async def test_cursor_manual(sentry_init, capture_events) -> None: async def test_prepared_stmt(sentry_init, capture_events) -> None: sentry_init( integrations=[AsyncPGIntegration()], + traces_sample_rate=1.0, _experiments={"record_sql_params": True}, ) events = capture_events() @@ -368,14 +392,14 @@ async def test_prepared_stmt(sentry_init, capture_events) -> None: stmt = await conn.prepare("SELECT * FROM users WHERE name = $1") - print(await stmt.fetchval("Bob")) - print(await stmt.fetchval("Alice")) + await stmt.fetchval("Bob") + await stmt.fetchval("Alice") await conn.close() capture_message("hi") - (event,) = events + event = events[-1] for crumb in event["breadcrumbs"]["values"]: del crumb["timestamp"] @@ -401,6 +425,7 @@ async def test_prepared_stmt(sentry_init, capture_events) -> None: async def test_connection_pool(sentry_init, capture_events) -> None: sentry_init( integrations=[AsyncPGIntegration()], + traces_sample_rate=1.0, _experiments={"record_sql_params": True}, ) events = capture_events() @@ -427,7 +452,7 @@ async def test_connection_pool(sentry_init, capture_events) -> None: capture_message("hi") - (event,) = events + event = events[-1] for crumb in event["breadcrumbs"]["values"]: del crumb["timestamp"] @@ -481,7 +506,7 @@ async def test_query_source_disabled(sentry_init, capture_events): events = capture_events() - with start_transaction(name="test_transaction", sampled=True): + with start_span(name="test_span"): conn: Connection = await connect(PG_CONNECTION_URI) await conn.execute( @@ -520,7 +545,7 @@ async def test_query_source_enabled( events = capture_events() - with start_transaction(name="test_transaction", sampled=True): + with start_span(name="test_span"): conn: Connection = await connect(PG_CONNECTION_URI) await conn.execute( @@ -553,7 +578,7 @@ async def test_query_source(sentry_init, capture_events): events = capture_events() - with start_transaction(name="test_transaction", sampled=True): + with start_span(name="test_span"): conn: Connection = await connect(PG_CONNECTION_URI) await conn.execute( @@ -605,7 +630,7 @@ async def test_query_source_with_module_in_search_path(sentry_init, capture_even from asyncpg_helpers.helpers import execute_query_in_connection - with start_transaction(name="test_transaction", sampled=True): + with start_span(name="test_span"): conn: Connection = await connect(PG_CONNECTION_URI) await execute_query_in_connection( @@ -649,27 +674,26 @@ async def 
test_no_query_source_if_duration_too_short(sentry_init, capture_events events = capture_events() - with start_transaction(name="test_transaction", sampled=True): + with start_span(name="test_span"): conn: Connection = await connect(PG_CONNECTION_URI) @contextmanager def fake_record_sql_queries(*args, **kwargs): - with freeze_time(datetime(2024, 1, 1, microsecond=0)): + with freeze_time(datetime.datetime(2024, 1, 1, microsecond=99999)): with record_sql_queries(*args, **kwargs) as span: - freezer = freeze_time(datetime(2024, 1, 1, microsecond=99999)) - freezer.start() - - freezer.stop() - - yield span + yield span with mock.patch( - "sentry_sdk.integrations.asyncpg.record_sql_queries", - fake_record_sql_queries, + "sentry_sdk.tracing.POTelSpan.start_timestamp", + datetime.datetime(2024, 1, 1, microsecond=0, tzinfo=datetime.timezone.utc), ): - await conn.execute( - "INSERT INTO users(name, password, dob) VALUES ('Alice', 'secret', '1990-12-25')", - ) + with mock.patch( + "sentry_sdk.integrations.asyncpg.record_sql_queries", + fake_record_sql_queries, + ): + await conn.execute( + "INSERT INTO users(name, password, dob) VALUES ('Alice', 'secret', '1990-12-25')", + ) await conn.close() @@ -697,27 +721,26 @@ async def test_query_source_if_duration_over_threshold(sentry_init, capture_even events = capture_events() - with start_transaction(name="test_transaction", sampled=True): + with start_span(name="test_span"): conn: Connection = await connect(PG_CONNECTION_URI) @contextmanager def fake_record_sql_queries(*args, **kwargs): - with freeze_time(datetime(2024, 1, 1, microsecond=0)): + with freeze_time(datetime.datetime(2024, 1, 1, microsecond=100001)): with record_sql_queries(*args, **kwargs) as span: - freezer = freeze_time(datetime(2024, 1, 1, microsecond=100001)) - freezer.start() - - freezer.stop() - - yield span + yield span with mock.patch( - "sentry_sdk.integrations.asyncpg.record_sql_queries", - fake_record_sql_queries, + "sentry_sdk.tracing.POTelSpan.start_timestamp", + datetime.datetime(2024, 1, 1, microsecond=0, tzinfo=datetime.timezone.utc), ): - await conn.execute( - "INSERT INTO users(name, password, dob) VALUES ('Alice', 'secret', '1990-12-25')", - ) + with mock.patch( + "sentry_sdk.integrations.asyncpg.record_sql_queries", + fake_record_sql_queries, + ): + await conn.execute( + "INSERT INTO users(name, password, dob) VALUES ('Alice', 'secret', '1990-12-25')", + ) await conn.close() @@ -760,7 +783,7 @@ async def test_span_origin(sentry_init, capture_events): events = capture_events() - with start_transaction(name="test_transaction"): + with start_span(name="test_span"): conn: Connection = await connect(PG_CONNECTION_URI) await conn.execute("SELECT 1") From caa1ebe5230697d155ddd093be9d39b8fb9186d3 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Wed, 16 Oct 2024 12:40:04 +0200 Subject: [PATCH 064/244] Fix Django CI (#3659) --- sentry_sdk/integrations/django/__init__.py | 8 +- sentry_sdk/integrations/django/templates.py | 7 +- tests/integrations/django/test_basic.py | 17 ++-- .../integrations/django/test_cache_module.py | 66 +++++++------- .../integrations/django/test_db_query_data.py | 85 +++++++++---------- .../integrations/django/test_transactions.py | 1 + 6 files changed, 91 insertions(+), 93 deletions(-) diff --git a/sentry_sdk/integrations/django/__init__.py b/sentry_sdk/integrations/django/__init__.py index e68f0cacef..ba2cc6a0ad 100644 --- a/sentry_sdk/integrations/django/__init__.py +++ b/sentry_sdk/integrations/django/__init__.py @@ -652,8 +652,8 @@ def execute(self, sql, 
params=None): _set_db_data(span, self) result = real_execute(self, sql, params) - with capture_internal_exceptions(): - add_query_source(span) + with capture_internal_exceptions(): + add_query_source(span) return result @@ -672,8 +672,8 @@ def executemany(self, sql, param_list): result = real_executemany(self, sql, param_list) - with capture_internal_exceptions(): - add_query_source(span) + with capture_internal_exceptions(): + add_query_source(span) return result diff --git a/sentry_sdk/integrations/django/templates.py b/sentry_sdk/integrations/django/templates.py index 10e8a924b7..f5309c9cf3 100644 --- a/sentry_sdk/integrations/django/templates.py +++ b/sentry_sdk/integrations/django/templates.py @@ -73,7 +73,9 @@ def rendered_content(self): name=_get_template_name_description(self.template_name), origin=DjangoIntegration.origin, ) as span: - span.set_data("context", self.context_data) + if isinstance(self.context_data, dict): + for k, v in self.context_data.items(): + span.set_data(f"context.{k}", v) return real_rendered_content.fget(self) SimpleTemplateResponse.rendered_content = rendered_content @@ -101,7 +103,8 @@ def render(request, template_name, context=None, *args, **kwargs): name=_get_template_name_description(template_name), origin=DjangoIntegration.origin, ) as span: - span.set_data("context", context) + for k, v in context.items(): + span.set_data(f"context.{k}", v) return real_render(request, template_name, context, *args, **kwargs) django.shortcuts.render = render diff --git a/tests/integrations/django/test_basic.py b/tests/integrations/django/test_basic.py index b05a7c3521..2043758949 100644 --- a/tests/integrations/django/test_basic.py +++ b/tests/integrations/django/test_basic.py @@ -8,7 +8,6 @@ from werkzeug.test import Client from django import VERSION as DJANGO_VERSION -from django.contrib.auth.models import User from django.core.management import execute_from_command_line from django.db.utils import OperationalError, ProgrammingError, DataError from django.http.request import RawPostDataException @@ -288,6 +287,9 @@ def test_user_captured(sentry_init, client, capture_events): def test_queryset_repr(sentry_init, capture_events): sentry_init(integrations=[DjangoIntegration()]) events = capture_events() + + from django.contrib.auth.models import User + User.objects.create_user("john", "lennon@thebeatles.com", "johnpassword") try: @@ -374,7 +376,7 @@ def test_sql_queries(sentry_init, capture_events, with_integration): crumb = event["breadcrumbs"]["values"][-1] assert crumb["message"] == "SELECT count(*) FROM people_person WHERE foo = %s" - assert crumb["data"]["db.params"] == [123] + assert crumb["data"]["db.params"] == "[123]" @pytest.mark.forked @@ -409,7 +411,7 @@ def test_sql_dict_query_params(sentry_init, capture_events): assert crumb["message"] == ( "SELECT count(*) FROM people_person WHERE foo = %(my_foo)s" ) - assert crumb["data"]["db.params"] == {"my_foo": 10} + assert crumb["data"]["db.params"] == str({"my_foo": 10}) @pytest.mark.forked @@ -471,7 +473,7 @@ def test_sql_psycopg2_string_composition(sentry_init, capture_events, query): (event,) = events crumb = event["breadcrumbs"]["values"][-1] assert crumb["message"] == ('SELECT %(my_param)s FROM "foobar"') - assert crumb["data"]["db.params"] == {"my_param": 10} + assert crumb["data"]["db.params"] == str({"my_param": 10}) @pytest.mark.forked @@ -524,7 +526,7 @@ def test_sql_psycopg2_placeholders(sentry_init, capture_events): { "category": "query", "data": { - "db.params": {"first_var": "fizz", "second_var": "not 
a date"}, + "db.params": str({"first_var": "fizz", "second_var": "not a date"}), "db.paramstyle": "format", }, "message": 'insert into my_test_table ("foo", "bar") values (%(first_var)s, ' @@ -928,6 +930,11 @@ def test_render_spans(sentry_init, client, capture_events, render_span_tree): transaction = events[0] assert expected_line in render_span_tree(transaction) + render_span = next( + span for span in transaction["spans"] if span["op"] == "template.render" + ) + assert "context.user_age" in render_span["data"] + if DJANGO_VERSION >= (1, 10): EXPECTED_MIDDLEWARE_SPANS = """\ diff --git a/tests/integrations/django/test_cache_module.py b/tests/integrations/django/test_cache_module.py index 263f9f36f8..03e4925ab0 100644 --- a/tests/integrations/django/test_cache_module.py +++ b/tests/integrations/django/test_cache_module.py @@ -511,7 +511,9 @@ def test_cache_spans_item_size(sentry_init, client, capture_events, use_django_c @pytest.mark.forked @pytest_mark_django_db_decorator() -def test_cache_spans_get_many(sentry_init, capture_events, use_django_caching): +def test_cache_spans_get_many( + sentry_init, capture_events, use_django_caching, render_span_tree +): sentry_init( integrations=[ DjangoIntegration( @@ -528,7 +530,7 @@ def test_cache_spans_get_many(sentry_init, capture_events, use_django_caching): from django.core.cache import cache - with sentry_sdk.start_transaction(): + with sentry_sdk.start_transaction(name="caches"): cache.get_many([f"S{id}", f"S{id+1}"]) cache.set(f"S{id}", "Sensitive1") cache.get_many([f"S{id}", f"S{id+1}"]) @@ -536,31 +538,26 @@ def test_cache_spans_get_many(sentry_init, capture_events, use_django_caching): (transaction,) = events assert len(transaction["spans"]) == 7 - assert transaction["spans"][0]["op"] == "cache.get" - assert transaction["spans"][0]["description"] == f"S{id}, S{id+1}" - - assert transaction["spans"][1]["op"] == "cache.get" - assert transaction["spans"][1]["description"] == f"S{id}" - - assert transaction["spans"][2]["op"] == "cache.get" - assert transaction["spans"][2]["description"] == f"S{id+1}" - - assert transaction["spans"][3]["op"] == "cache.put" - assert transaction["spans"][3]["description"] == f"S{id}" - - assert transaction["spans"][4]["op"] == "cache.get" - assert transaction["spans"][4]["description"] == f"S{id}, S{id+1}" - - assert transaction["spans"][5]["op"] == "cache.get" - assert transaction["spans"][5]["description"] == f"S{id}" - - assert transaction["spans"][6]["op"] == "cache.get" - assert transaction["spans"][6]["description"] == f"S{id+1}" + assert ( + render_span_tree(transaction) + == f"""\ +- op="caches": description=null + - op="cache.get": description="S{id}, S{id+1}" + - op="cache.get": description="S{id}" + - op="cache.get": description="S{id+1}" + - op="cache.put": description="S{id}" + - op="cache.get": description="S{id}, S{id+1}" + - op="cache.get": description="S{id}" + - op="cache.get": description="S{id+1}"\ +""" # noqa: E221 + ) @pytest.mark.forked @pytest_mark_django_db_decorator() -def test_cache_spans_set_many(sentry_init, capture_events, use_django_caching): +def test_cache_spans_set_many( + sentry_init, capture_events, use_django_caching, render_span_tree +): sentry_init( integrations=[ DjangoIntegration( @@ -577,24 +574,23 @@ def test_cache_spans_set_many(sentry_init, capture_events, use_django_caching): from django.core.cache import cache - with sentry_sdk.start_transaction(): + with sentry_sdk.start_transaction(name="caches"): cache.set_many({f"S{id}": "Sensitive1", f"S{id+1}": "Sensitive2"}) 
cache.get(f"S{id}") (transaction,) = events assert len(transaction["spans"]) == 4 - assert transaction["spans"][0]["op"] == "cache.put" - assert transaction["spans"][0]["description"] == f"S{id}, S{id+1}" - - assert transaction["spans"][1]["op"] == "cache.put" - assert transaction["spans"][1]["description"] == f"S{id}" - - assert transaction["spans"][2]["op"] == "cache.put" - assert transaction["spans"][2]["description"] == f"S{id+1}" - - assert transaction["spans"][3]["op"] == "cache.get" - assert transaction["spans"][3]["description"] == f"S{id}" + assert ( + render_span_tree(transaction) + == f"""\ +- op="caches": description=null + - op="cache.put": description="S{id}, S{id+1}" + - op="cache.put": description="S{id}" + - op="cache.put": description="S{id+1}" + - op="cache.get": description="S{id}"\ +""" # noqa: E221 + ) @pytest.mark.forked diff --git a/tests/integrations/django/test_db_query_data.py b/tests/integrations/django/test_db_query_data.py index 6e49f61085..ccbe6ee28a 100644 --- a/tests/integrations/django/test_db_query_data.py +++ b/tests/integrations/django/test_db_query_data.py @@ -1,6 +1,7 @@ import os import pytest +from contextlib import contextmanager from datetime import datetime from unittest import mock @@ -15,7 +16,7 @@ from freezegun import freeze_time from werkzeug.test import Client -from sentry_sdk import start_transaction +from sentry_sdk import start_transaction, start_span from sentry_sdk.consts import SPANDATA from sentry_sdk.integrations.django import DjangoIntegration from sentry_sdk.tracing_utils import record_sql_queries @@ -347,29 +348,24 @@ def test_no_query_source_if_duration_too_short(sentry_init, client, capture_even events = capture_events() - class fake_record_sql_queries: # noqa: N801 - def __init__(self, *args, **kwargs): - with freeze_time(datetime(2024, 1, 1, microsecond=0)): - with record_sql_queries(*args, **kwargs) as span: - self.span = span - freezer = freeze_time(datetime(2024, 1, 1, microsecond=99999)) - freezer.start() - - freezer.stop() - - def __enter__(self): - return self.span - - def __exit__(self, type, value, traceback): - pass - - with mock.patch( - "sentry_sdk.integrations.django.record_sql_queries", - fake_record_sql_queries, - ): - _, status, _ = unpack_werkzeug_response( - client.get(reverse("postgres_select_orm")) - ) + def fake_start_span(*args, **kwargs): # noqa: N801 + with freeze_time(datetime(2024, 1, 1, microsecond=0)): + return start_span(*args, **kwargs) + + @contextmanager + def fake_record_sql_queries(*args, **kwargs): # noqa: N801 + with freeze_time(datetime(2024, 1, 1, microsecond=99999)): + with record_sql_queries(*args, **kwargs) as span: + yield span + + with mock.patch("sentry_sdk.start_span", fake_start_span): + with mock.patch( + "sentry_sdk.integrations.django.record_sql_queries", + fake_record_sql_queries, + ): + _, status, _ = unpack_werkzeug_response( + client.get(reverse("postgres_select_orm")) + ) assert status == "200 OK" @@ -407,29 +403,24 @@ def test_query_source_if_duration_over_threshold(sentry_init, client, capture_ev events = capture_events() - class fake_record_sql_queries: # noqa: N801 - def __init__(self, *args, **kwargs): - with freeze_time(datetime(2024, 1, 1, microsecond=0)): - with record_sql_queries(*args, **kwargs) as span: - self.span = span - freezer = freeze_time(datetime(2024, 1, 1, microsecond=99999)) - freezer.start() - - freezer.stop() - - def __enter__(self): - return self.span - - def __exit__(self, type, value, traceback): - pass - - with mock.patch( - 
"sentry_sdk.integrations.django.record_sql_queries", - fake_record_sql_queries, - ): - _, status, _ = unpack_werkzeug_response( - client.get(reverse("postgres_select_orm")) - ) + def fake_start_span(*args, **kwargs): # noqa: N801 + with freeze_time(datetime(2024, 1, 1, microsecond=0)): + return start_span(*args, **kwargs) + + @contextmanager + def fake_record_sql_queries(*args, **kwargs): # noqa: N801 + with freeze_time(datetime(2024, 1, 1, microsecond=100001)): + with record_sql_queries(*args, **kwargs) as span: + yield span + + with mock.patch("sentry_sdk.start_span", fake_start_span): + with mock.patch( + "sentry_sdk.integrations.django.record_sql_queries", + fake_record_sql_queries, + ): + _, status, _ = unpack_werkzeug_response( + client.get(reverse("postgres_select_orm")) + ) assert status == "200 OK" diff --git a/tests/integrations/django/test_transactions.py b/tests/integrations/django/test_transactions.py index 14f8170fc3..0eaf99dc23 100644 --- a/tests/integrations/django/test_transactions.py +++ b/tests/integrations/django/test_transactions.py @@ -21,6 +21,7 @@ included_url_conf = ((re_path(r"^foo/bar/(?P[\w]+)", lambda x: ""),), "") from sentry_sdk.integrations.django.transactions import RavenResolver +from tests.integrations.django.myapp.wsgi import application # noqa: F401 example_url_conf = ( From 2a3a7386a8550717384fbbdaa603abdba567391a Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Thu, 17 Oct 2024 13:48:09 +0200 Subject: [PATCH 065/244] remove PY37, everything is PY37 --- sentry_sdk/_compat.py | 1 - 1 file changed, 1 deletion(-) diff --git a/sentry_sdk/_compat.py b/sentry_sdk/_compat.py index a811cf2120..fc04ed5859 100644 --- a/sentry_sdk/_compat.py +++ b/sentry_sdk/_compat.py @@ -9,7 +9,6 @@ T = TypeVar("T") -PY37 = sys.version_info[0] == 3 and sys.version_info[1] >= 7 PY38 = sys.version_info[0] == 3 and sys.version_info[1] >= 8 PY310 = sys.version_info[0] == 3 and sys.version_info[1] >= 10 PY311 = sys.version_info[0] == 3 and sys.version_info[1] >= 11 From b46faea39547f8d9909f7533b57bcd2afcc9086f Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Thu, 17 Oct 2024 13:53:28 +0200 Subject: [PATCH 066/244] more leftovers --- tests/test_transport.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/tests/test_transport.py b/tests/test_transport.py index fa4e13f834..6f32b82108 100644 --- a/tests/test_transport.py +++ b/tests/test_transport.py @@ -14,11 +14,6 @@ from pytest_localserver.http import WSGIServer from werkzeug.wrappers import Request, Response -try: - import gevent -except ImportError: - gevent = None - import sentry_sdk from sentry_sdk import ( Client, From acf774694e024bb2f4a57f158598a55a3062e3eb Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Thu, 17 Oct 2024 17:13:38 +0200 Subject: [PATCH 067/244] Serialize span attrs properly (#3668) --- sentry_sdk/integrations/asyncpg.py | 3 +- sentry_sdk/integrations/clickhouse_driver.py | 12 +++-- sentry_sdk/tracing_utils.py | 9 ++-- sentry_sdk/utils.py | 26 ++++++++++ .../test_clickhouse_driver.py | 48 +++++++++---------- tests/integrations/django/test_basic.py | 8 ++-- tests/test_utils.py | 32 +++++++++++++ 7 files changed, 100 insertions(+), 38 deletions(-) diff --git a/sentry_sdk/integrations/asyncpg.py b/sentry_sdk/integrations/asyncpg.py index 71740cb3aa..c1f2557a20 100644 --- a/sentry_sdk/integrations/asyncpg.py +++ b/sentry_sdk/integrations/asyncpg.py @@ -8,6 +8,7 @@ from sentry_sdk.tracing import Span from sentry_sdk.tracing_utils import add_query_source, record_sql_queries from sentry_sdk.utils import ( 
+ _serialize_span_attribute, ensure_integration_enabled, parse_version, capture_internal_exceptions, @@ -147,7 +148,7 @@ def _inner(*args: Any, **kwargs: Any) -> T: # noqa: N807 ) as span: _set_db_data(span, args[0]) res = f(*args, **kwargs) - span.set_attribute("db.cursor", str(res)) + span.set_attribute("db.cursor", _serialize_span_attribute(res)) return res diff --git a/sentry_sdk/integrations/clickhouse_driver.py b/sentry_sdk/integrations/clickhouse_driver.py index 4ee18d0e54..245ea0ef71 100644 --- a/sentry_sdk/integrations/clickhouse_driver.py +++ b/sentry_sdk/integrations/clickhouse_driver.py @@ -3,7 +3,11 @@ from sentry_sdk.integrations import Integration, DidNotEnable from sentry_sdk.tracing import Span from sentry_sdk.scope import should_send_default_pii -from sentry_sdk.utils import capture_internal_exceptions, ensure_integration_enabled +from sentry_sdk.utils import ( + _serialize_span_attribute, + capture_internal_exceptions, + ensure_integration_enabled, +) from typing import TYPE_CHECKING, TypeVar @@ -99,7 +103,7 @@ def _inner(*args: P.args, **kwargs: P.kwargs) -> T: if params and should_send_default_pii(): connection._sentry_db_params = params - span.set_attribute("db.params", str(params)) + span.set_attribute("db.params", _serialize_span_attribute(params)) # run the original code ret = f(*args, **kwargs) @@ -117,7 +121,7 @@ def _inner_end(*args: P.args, **kwargs: P.kwargs) -> T: if span is not None: if res is not None and should_send_default_pii(): - span.set_attribute("db.result", str(res)) + span.set_attribute("db.result", _serialize_span_attribute(res)) with capture_internal_exceptions(): query = span.get_attribute("db.query.text") @@ -159,7 +163,7 @@ def _inner_send_data(*args: P.args, **kwargs: P.kwargs) -> T: getattr(instance.connection, "_sentry_db_params", None) or [] ) db_params.extend(data) - span.set_attribute("db.params", str(db_params)) + span.set_attribute("db.params", _serialize_span_attribute(db_params)) try: del instance.connection._sentry_db_params except AttributeError: diff --git a/sentry_sdk/tracing_utils.py b/sentry_sdk/tracing_utils.py index b8f7288374..28b301c397 100644 --- a/sentry_sdk/tracing_utils.py +++ b/sentry_sdk/tracing_utils.py @@ -3,11 +3,11 @@ import os import re import sys +import uuid from collections.abc import Mapping from datetime import datetime, timedelta, timezone from functools import wraps from urllib.parse import quote, unquote -import uuid import sentry_sdk from sentry_sdk.consts import OP, SPANDATA @@ -23,6 +23,7 @@ _is_external_source, _is_in_project_root, _module_in_list, + _serialize_span_attribute, ) from typing import TYPE_CHECKING @@ -133,13 +134,13 @@ def record_sql_queries( data = {} if params_list is not None: - data["db.params"] = str(params_list) + data["db.params"] = _serialize_span_attribute(params_list) if paramstyle is not None: - data["db.paramstyle"] = str(paramstyle) + data["db.paramstyle"] = _serialize_span_attribute(paramstyle) if executemany: data["db.executemany"] = True if record_cursor_repr and cursor is not None: - data["db.cursor"] = str(cursor) + data["db.cursor"] = _serialize_span_attribute(cursor) with capture_internal_exceptions(): sentry_sdk.add_breadcrumb(message=query, category="query", data=data) diff --git a/sentry_sdk/utils.py b/sentry_sdk/utils.py index 80a5df9700..61ba34d956 100644 --- a/sentry_sdk/utils.py +++ b/sentry_sdk/utils.py @@ -51,6 +51,7 @@ ) from gevent.hub import Hub as GeventHub + from opentelemetry.util.types import AttributeValue from sentry_sdk._types import Event, ExcInfo 
@@ -1831,3 +1832,28 @@ def get_current_thread_meta(thread=None): # we've tried everything, time to give up return None, None + + +def _serialize_span_attribute(value): + # type: (Any) -> Optional[AttributeValue] + """Serialize an object so that it's OTel-compatible and displays nicely in Sentry.""" + # check for allowed primitives + if isinstance(value, (int, str, float, bool)): + return value + + # lists are allowed too, as long as they don't mix types + if isinstance(value, (list, tuple)): + for type_ in (int, str, float, bool): + if all(isinstance(item, type_) for item in value): + return list(value) + + # if this is anything else, just try to coerce to string + # we prefer json.dumps since this makes things like dictionaries display + # nicely in the UI + try: + return json.dumps(value) + except TypeError: + try: + return str(value) + except Exception: + return None diff --git a/tests/integrations/clickhouse_driver/test_clickhouse_driver.py b/tests/integrations/clickhouse_driver/test_clickhouse_driver.py index 6378919b06..5da77ce13d 100644 --- a/tests/integrations/clickhouse_driver/test_clickhouse_driver.py +++ b/tests/integrations/clickhouse_driver/test_clickhouse_driver.py @@ -16,8 +16,6 @@ if clickhouse_driver.VERSION < (0, 2, 6): EXPECT_PARAMS_IN_SELECT = False -PARAMS_SERIALIZER = str - def test_clickhouse_client_breadcrumbs(sentry_init, capture_events) -> None: sentry_init( @@ -144,7 +142,7 @@ def test_clickhouse_client_breadcrumbs_with_pii(sentry_init, capture_events) -> "db.user": "default", "server.address": "localhost", "server.port": 9000, - "db.result": str([]), + "db.result": [], }, "message": "DROP TABLE IF EXISTS test", "type": "default", @@ -157,7 +155,7 @@ def test_clickhouse_client_breadcrumbs_with_pii(sentry_init, capture_events) -> "db.user": "default", "server.address": "localhost", "server.port": 9000, - "db.result": str([]), + "db.result": [], }, "message": "CREATE TABLE test (x Int32) ENGINE = Memory", "type": "default", @@ -170,7 +168,7 @@ def test_clickhouse_client_breadcrumbs_with_pii(sentry_init, capture_events) -> "db.user": "default", "server.address": "localhost", "server.port": 9000, - "db.params": PARAMS_SERIALIZER([{"x": 100}]), + "db.params": '[{"x": 100}]', }, "message": "INSERT INTO test (x) VALUES", "type": "default", @@ -183,7 +181,7 @@ def test_clickhouse_client_breadcrumbs_with_pii(sentry_init, capture_events) -> "db.user": "default", "server.address": "localhost", "server.port": 9000, - "db.params": PARAMS_SERIALIZER([[170], [200]]), + "db.params": "[[170], [200]]", }, "message": "INSERT INTO test (x) VALUES", "type": "default", @@ -196,8 +194,8 @@ def test_clickhouse_client_breadcrumbs_with_pii(sentry_init, capture_events) -> "db.user": "default", "server.address": "localhost", "server.port": 9000, - "db.result": str([[370]]), - "db.params": PARAMS_SERIALIZER({"minv": 150}), + "db.result": "[[370]]", + "db.params": '{"minv": 150}', }, "message": "SELECT sum(x) FROM test WHERE x > 150", "type": "default", @@ -396,7 +394,7 @@ def test_clickhouse_client_spans_with_pii( "server.address": "localhost", "server.port": 9000, "db.query.text": "DROP TABLE IF EXISTS test", - "db.result": str([]), + "db.result": [], }, "trace_id": transaction_trace_id, "parent_span_id": transaction_span_id, @@ -413,7 +411,7 @@ def test_clickhouse_client_spans_with_pii( "db.name": "", "db.user": "default", "db.query.text": "CREATE TABLE test (x Int32) ENGINE = Memory", - "db.result": str([]), + "db.result": [], "server.address": "localhost", "server.port": 9000, }, @@ 
-432,7 +430,7 @@ def test_clickhouse_client_spans_with_pii( "db.name": "", "db.user": "default", "db.query.text": "INSERT INTO test (x) VALUES", - "db.params": PARAMS_SERIALIZER([{"x": 100}]), + "db.params": '[{"x": 100}]', "server.address": "localhost", "server.port": 9000, }, @@ -468,9 +466,9 @@ def test_clickhouse_client_spans_with_pii( "db.system": "clickhouse", "db.name": "", "db.user": "default", - "db.params": PARAMS_SERIALIZER({"minv": 150}), + "db.params": '{"minv": 150}', "db.query.text": "SELECT sum(x) FROM test WHERE x > 150", - "db.result": str([(370,)]), + "db.result": "[[370]]", "server.address": "localhost", "server.port": 9000, }, @@ -622,7 +620,7 @@ def test_clickhouse_dbapi_breadcrumbs_with_pii(sentry_init, capture_events) -> N "db.user": "default", "server.address": "localhost", "server.port": 9000, - "db.result": str([[], []]), + "db.result": "[[], []]", }, "message": "DROP TABLE IF EXISTS test", "type": "default", @@ -635,7 +633,7 @@ def test_clickhouse_dbapi_breadcrumbs_with_pii(sentry_init, capture_events) -> N "db.user": "default", "server.address": "localhost", "server.port": 9000, - "db.result": str([[], []]), + "db.result": "[[], []]", }, "message": "CREATE TABLE test (x Int32) ENGINE = Memory", "type": "default", @@ -648,7 +646,7 @@ def test_clickhouse_dbapi_breadcrumbs_with_pii(sentry_init, capture_events) -> N "db.user": "default", "server.address": "localhost", "server.port": 9000, - "db.params": PARAMS_SERIALIZER([{"x": 100}]), + "db.params": '[{"x": 100}]', }, "message": "INSERT INTO test (x) VALUES", "type": "default", @@ -661,7 +659,7 @@ def test_clickhouse_dbapi_breadcrumbs_with_pii(sentry_init, capture_events) -> N "db.user": "default", "server.address": "localhost", "server.port": 9000, - "db.params": PARAMS_SERIALIZER([[170], [200]]), + "db.params": "[[170], [200]]", }, "message": "INSERT INTO test (x) VALUES", "type": "default", @@ -674,8 +672,8 @@ def test_clickhouse_dbapi_breadcrumbs_with_pii(sentry_init, capture_events) -> N "db.user": "default", "server.address": "localhost", "server.port": 9000, - "db.params": PARAMS_SERIALIZER({"minv": 150}), - "db.result": str([[["370"]], [["'sum(x)'", "'Int64'"]]]), + "db.params": '{"minv": 150}', + "db.result": '[[["370"]], [["\'sum(x)\'", "\'Int64\'"]]]', }, "message": "SELECT sum(x) FROM test WHERE x > 150", "type": "default", @@ -869,7 +867,7 @@ def test_clickhouse_dbapi_spans_with_pii( "db.name": "", "db.user": "default", "db.query.text": "DROP TABLE IF EXISTS test", - "db.result": str(([], [])), + "db.result": "[[], []]", "server.address": "localhost", "server.port": 9000, }, @@ -888,7 +886,7 @@ def test_clickhouse_dbapi_spans_with_pii( "db.name": "", "db.user": "default", "db.query.text": "CREATE TABLE test (x Int32) ENGINE = Memory", - "db.result": str(([], [])), + "db.result": "[[], []]", "server.address": "localhost", "server.port": 9000, }, @@ -907,7 +905,7 @@ def test_clickhouse_dbapi_spans_with_pii( "db.name": "", "db.user": "default", "db.query.text": "INSERT INTO test (x) VALUES", - "db.params": PARAMS_SERIALIZER([{"x": 100}]), + "db.params": '[{"x": 100}]', "server.address": "localhost", "server.port": 9000, }, @@ -926,7 +924,7 @@ def test_clickhouse_dbapi_spans_with_pii( "db.name": "", "db.user": "default", "db.query.text": "INSERT INTO test (x) VALUES", - "db.params": PARAMS_SERIALIZER([[170], [200]]), + "db.params": "[[170], [200]]", "server.address": "localhost", "server.port": 9000, }, @@ -945,8 +943,8 @@ def test_clickhouse_dbapi_spans_with_pii( "db.name": "", "db.user": "default", 
"db.query.text": "SELECT sum(x) FROM test WHERE x > 150", - "db.params": PARAMS_SERIALIZER({"minv": 150}), - "db.result": str(([(370,)], [("sum(x)", "Int64")])), + "db.params": '{"minv": 150}', + "db.result": '[[[370]], [["sum(x)", "Int64"]]]', "server.address": "localhost", "server.port": 9000, }, diff --git a/tests/integrations/django/test_basic.py b/tests/integrations/django/test_basic.py index ed39dda350..8d0dbb9e32 100644 --- a/tests/integrations/django/test_basic.py +++ b/tests/integrations/django/test_basic.py @@ -376,7 +376,7 @@ def test_sql_queries(sentry_init, capture_events, with_integration): crumb = event["breadcrumbs"]["values"][-1] assert crumb["message"] == "SELECT count(*) FROM people_person WHERE foo = %s" - assert crumb["data"]["db.params"] == "[123]" + assert crumb["data"]["db.params"] == [123] @pytest.mark.forked @@ -411,7 +411,7 @@ def test_sql_dict_query_params(sentry_init, capture_events): assert crumb["message"] == ( "SELECT count(*) FROM people_person WHERE foo = %(my_foo)s" ) - assert crumb["data"]["db.params"] == str({"my_foo": 10}) + assert crumb["data"]["db.params"] == '{"my_foo": 10}' @pytest.mark.forked @@ -473,7 +473,7 @@ def test_sql_psycopg2_string_composition(sentry_init, capture_events, query): (event,) = events crumb = event["breadcrumbs"]["values"][-1] assert crumb["message"] == ('SELECT %(my_param)s FROM "foobar"') - assert crumb["data"]["db.params"] == str({"my_param": 10}) + assert crumb["data"]["db.params"] == '{"my_param": 10}' @pytest.mark.forked @@ -526,7 +526,7 @@ def test_sql_psycopg2_placeholders(sentry_init, capture_events): { "category": "query", "data": { - "db.params": str({"first_var": "fizz", "second_var": "not a date"}), + "db.params": '{"first_var": "fizz", "second_var": "not a date"}', "db.paramstyle": "format", }, "message": 'insert into my_test_table ("foo", "bar") values (%(first_var)s, ' diff --git a/tests/test_utils.py b/tests/test_utils.py index 6745e2a966..5011662f05 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -30,6 +30,7 @@ _get_installed_modules, _generate_installed_modules, ensure_integration_enabled, + _serialize_span_attribute, ) @@ -901,3 +902,34 @@ def test_format_timestamp_naive(): # Ensure that some timestamp is returned, without error. We currently treat these as local time, but this is an # implementation detail which we should not assert here. assert re.fullmatch(timestamp_regex, format_timestamp(datetime_object)) + + +class NoStr: + def __str__(self): + 1 / 0 + + +@pytest.mark.parametrize( + ("value", "result"), + ( + ("meow", "meow"), + (1, 1), + (47.0, 47.0), + (True, True), + (["meow", "bark"], ["meow", "bark"]), + ([True, False], [True, False]), + ([1, 2, 3], [1, 2, 3]), + ([46.5, 47.0, 47.5], [46.5, 47.0, 47.5]), + (["meow", 47], '["meow", 47]'), # mixed types not allowed in a list + (None, "null"), + ( + {"cat": "meow", "dog": ["bark", "woof"]}, + '{"cat": "meow", "dog": ["bark", "woof"]}', + ), + (datetime(2024, 1, 1), "2024-01-01 00:00:00"), + (("meow", "purr"), ["meow", "purr"]), + (NoStr(), None), + ), +) +def test_serialize_span_attribute(value, result): + assert _serialize_span_attribute(value) == result From 6e3778d87f71a705013239d8d556501a070c9012 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Mon, 21 Oct 2024 13:58:03 +0200 Subject: [PATCH 068/244] Move `subprocess` breadcrumbs from `maybe_create_breadcrumbs_from_span` to integration. 
(#3637) Move `subprocess` breadcrumbs from `maybe_create_breadcrumbs_from_span` into the stdlib integration and preserve the breadcrumb behavior in POTel. --- sentry_sdk/integrations/stdlib.py | 19 +++ sentry_sdk/tracing_utils.py | 13 +- tests/integrations/stdlib/test_subprocess.py | 37 ++++- tests/test_breadcrumbs.py | 140 +++++++++++++++++++ 4 files changed, 201 insertions(+), 8 deletions(-) create mode 100644 tests/test_breadcrumbs.py diff --git a/sentry_sdk/integrations/stdlib.py b/sentry_sdk/integrations/stdlib.py index 287c8cb272..8e038d6d3b 100644 --- a/sentry_sdk/integrations/stdlib.py +++ b/sentry_sdk/integrations/stdlib.py @@ -13,6 +13,7 @@ SENSITIVE_DATA_SUBSTITUTE, capture_internal_exceptions, ensure_integration_enabled, + get_current_thread_meta, is_sentry_url, logger, safe_repr, @@ -225,6 +226,24 @@ def sentry_patched_popen_init(self, *a, **kw): rv = old_popen_init(self, *a, **kw) span.set_tag("subprocess.pid", self.pid) + + with capture_internal_exceptions(): + thread_id, thread_name = get_current_thread_meta() + breadcrumb_data = { + "subprocess.pid": self.pid, + "thread.id": thread_id, + "thread.name": thread_name, + } + if cwd: + breadcrumb_data["subprocess.cwd"] = cwd + + sentry_sdk.add_breadcrumb( + type="subprocess", + category="subprocess", + message=description, + data=breadcrumb_data, + ) + return rv subprocess.Popen.__init__ = sentry_patched_popen_init # type: ignore diff --git a/sentry_sdk/tracing_utils.py b/sentry_sdk/tracing_utils.py index 28b301c397..f063897cb9 100644 --- a/sentry_sdk/tracing_utils.py +++ b/sentry_sdk/tracing_utils.py @@ -157,18 +157,17 @@ def record_sql_queries( def maybe_create_breadcrumbs_from_span(scope, span): # type: (sentry_sdk.Scope, sentry_sdk.tracing.Span) -> None - if span.op == OP.DB_REDIS: scope.add_breadcrumb( - message=span.description, type="redis", category="redis", data=span._tags + message=span.description, + type="redis", + category="redis", + data=span._tags, ) elif span.op == OP.HTTP_CLIENT: - scope.add_breadcrumb(type="http", category="httplib", data=span._data) - elif span.op == "subprocess": scope.add_breadcrumb( - type="subprocess", - category="subprocess", - message=span.description, + type="http", + category="httplib", data=span._data, ) diff --git a/tests/integrations/stdlib/test_subprocess.py b/tests/integrations/stdlib/test_subprocess.py index 593ef8a0dc..62d5a2aeba 100644 --- a/tests/integrations/stdlib/test_subprocess.py +++ b/tests/integrations/stdlib/test_subprocess.py @@ -3,10 +3,11 @@ import subprocess import sys from collections.abc import Mapping +from unittest import mock import pytest -from sentry_sdk import capture_message, start_transaction +from sentry_sdk import capture_exception, capture_message, start_transaction from sentry_sdk.integrations.stdlib import StdlibIntegration from tests.conftest import ApproxDict @@ -224,3 +225,37 @@ def test_subprocess_span_origin(sentry_init, capture_events): assert event["spans"][2]["op"] == "subprocess.wait" assert event["spans"][2]["origin"] == "auto.subprocess.stdlib.subprocess" + + +def test_subprocess_breadcrumb(sentry_init, capture_events): + sentry_init() + events = capture_events() + + args = [ + sys.executable, + "-c", + "print('hello world')", + ] + popen = subprocess.Popen(args) + popen.communicate() + popen.poll() + + try: + 1 / 0 + except ZeroDivisionError as ex: + capture_exception(ex) + + (event,) = events + breadcrumbs = event["breadcrumbs"]["values"] + assert len(breadcrumbs) == 1 + + (crumb,) = breadcrumbs + assert crumb["type"] == "subprocess" + 
assert crumb["category"] == "subprocess" + assert crumb["message"] == " ".join(args) + assert crumb["timestamp"] == mock.ANY + assert crumb["data"] == { + "subprocess.pid": popen.pid, + "thread.id": mock.ANY, + "thread.name": mock.ANY, + } diff --git a/tests/test_breadcrumbs.py b/tests/test_breadcrumbs.py new file mode 100644 index 0000000000..988f536fde --- /dev/null +++ b/tests/test_breadcrumbs.py @@ -0,0 +1,140 @@ +from unittest import mock + +import sentry_sdk +from sentry_sdk.consts import OP + + +def test_breadcrumbs(sentry_init, capture_events): + """ + This test illustrates how breadcrumbs are added to the error event when an error occurs + """ + sentry_init( + traces_sample_rate=1.0, + ) + events = capture_events() + + add_breadcrumbs_kwargs = { + "type": "navigation", + "category": "unit_tests.breadcrumbs", + "level": "fatal", + "origin": "unit-tests", + "data": { + "string": "foobar", + "number": 4.2, + "array": [1, 2, 3], + "dict": {"foo": "bar"}, + }, + } + + with sentry_sdk.start_transaction(name="trx-breadcrumbs"): + sentry_sdk.add_breadcrumb(message="breadcrumb0", **add_breadcrumbs_kwargs) + + with sentry_sdk.start_span(name="span1", op="function"): + sentry_sdk.add_breadcrumb(message="breadcrumb1", **add_breadcrumbs_kwargs) + + with sentry_sdk.start_span(name="span2", op="function"): + sentry_sdk.add_breadcrumb( + message="breadcrumb2", **add_breadcrumbs_kwargs + ) + + # Spans that create breadcrumbs automatically + with sentry_sdk.start_span(name="span3", op=OP.DB_REDIS) as span3: + span3.set_data("span3_data", "data on the redis span") + span3.set_tag("span3_tag", "tag on the redis span") + + with sentry_sdk.start_span(name="span4", op=OP.HTTP_CLIENT) as span4: + span4.set_data("span4_data", "data on the http.client span") + span4.set_tag("span4_tag", "tag on the http.client span") + + with sentry_sdk.start_span(name="span5", op=OP.SUBPROCESS) as span5: + span5.set_data("span5_data", "data on the subprocess span") + span5.set_tag("span5_tag", "tag on the subprocess span") + + with sentry_sdk.start_span(name="span6", op="function") as span6: + # This data on the span is not added to custom breadcrumbs. 
+ # Data from the span is only added to automatic breadcrumbs shown above + span6.set_data("span6_data", "data on span6") + span6.set_tag("span6_tag", "tag on the span6") + sentry_sdk.add_breadcrumb( + message="breadcrumb6", **add_breadcrumbs_kwargs + ) + + try: + 1 / 0 + except ZeroDivisionError as ex: + sentry_sdk.capture_exception(ex) + + (error,) = events + + breadcrumbs = error["breadcrumbs"]["values"] + + for crumb in breadcrumbs: + print(crumb) + + assert len(breadcrumbs) == 7 + + # Check for my custom breadcrumbs + for i in range(0, 3): + assert breadcrumbs[i]["message"] == f"breadcrumb{i}" + assert breadcrumbs[i]["type"] == "navigation" + assert breadcrumbs[i]["category"] == "unit_tests.breadcrumbs" + assert breadcrumbs[i]["level"] == "fatal" + assert breadcrumbs[i]["origin"] == "unit-tests" + assert breadcrumbs[i]["data"] == { + "string": "foobar", + "number": 4.2, + "array": [1, 2, 3], + "dict": {"foo": "bar"}, + } + assert breadcrumbs[i]["timestamp"] == mock.ANY + + # Check automatic redis breadcrumbs + assert breadcrumbs[3]["message"] == "span3" + assert breadcrumbs[3]["type"] == "redis" + assert breadcrumbs[3]["category"] == "redis" + assert "level" not in breadcrumbs[3] + assert "origin" not in breadcrumbs[3] + assert breadcrumbs[3]["data"] == { + "span3_tag": "tag on the redis span", + } + assert breadcrumbs[3]["timestamp"] == mock.ANY + + # Check automatic http.client breadcrumbs + assert "message" not in breadcrumbs[4] + assert breadcrumbs[4]["type"] == "http" + assert breadcrumbs[4]["category"] == "httplib" + assert "level" not in breadcrumbs[4] + assert "origin" not in breadcrumbs[4] + assert breadcrumbs[4]["data"] == { + "thread.id": mock.ANY, + "thread.name": mock.ANY, + "span4_data": "data on the http.client span", + } + assert breadcrumbs[4]["timestamp"] == mock.ANY + + # Check automatic subprocess breadcrumbs + assert breadcrumbs[5]["message"] == "span5" + assert breadcrumbs[5]["type"] == "subprocess" + assert breadcrumbs[5]["category"] == "subprocess" + assert "level" not in breadcrumbs[5] + assert "origin" not in breadcrumbs[5] + assert breadcrumbs[5]["data"] == { + "thread.id": mock.ANY, + "thread.name": mock.ANY, + "span5_data": "data on the subprocess span", + } + assert breadcrumbs[5]["timestamp"] == mock.ANY + + # Check for custom breadcrumbs on span6 + assert breadcrumbs[6]["message"] == "breadcrumb6" + assert breadcrumbs[6]["type"] == "navigation" + assert breadcrumbs[6]["category"] == "unit_tests.breadcrumbs" + assert breadcrumbs[6]["level"] == "fatal" + assert breadcrumbs[6]["origin"] == "unit-tests" + assert breadcrumbs[6]["data"] == { + "string": "foobar", + "number": 4.2, + "array": [1, 2, 3], + "dict": {"foo": "bar"}, + } + assert breadcrumbs[6]["timestamp"] == mock.ANY From 7cd4c8daaa0c4914caf5fc6c8e47cd6042990cb5 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Mon, 21 Oct 2024 15:56:11 +0200 Subject: [PATCH 069/244] Add sentry_meta object to Span and pass it through to ReadableSpan (#3676) --- .../integrations/opentelemetry/integration.py | 37 ++++++++++++++++++- .../opentelemetry/potel_span_processor.py | 10 ++--- sentry_sdk/tracing.py | 3 +- 3 files changed, 42 insertions(+), 8 deletions(-) diff --git a/sentry_sdk/integrations/opentelemetry/integration.py b/sentry_sdk/integrations/opentelemetry/integration.py index 3f71e86f02..595fdfcb6e 100644 --- a/sentry_sdk/integrations/opentelemetry/integration.py +++ b/sentry_sdk/integrations/opentelemetry/integration.py @@ -18,7 +18,8 @@ try: from opentelemetry import trace from opentelemetry.propagate 
import set_global_textmap - from opentelemetry.sdk.trace import TracerProvider + from opentelemetry.trace import Span as AbstractSpan + from opentelemetry.sdk.trace import TracerProvider, Span, ReadableSpan except ImportError: raise DidNotEnable("opentelemetry not installed") @@ -27,6 +28,11 @@ except ImportError: DjangoInstrumentor = None +from sentry_sdk._types import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Union, Any + CONFIGURABLE_INSTRUMENTATIONS = { DjangoInstrumentor: {"is_sql_commentor_enabled": True}, @@ -45,11 +51,40 @@ def setup_once(): ) _setup_sentry_tracing() + _patch_readable_span() # _setup_instrumentors() logger.debug("[OTel] Finished setting up OpenTelemetry integration") +def _patch_readable_span(): + # type: () -> None + """ + We need to pass through sentry specific metadata/objects from Span to ReadableSpan + to work with them consistently in the SpanProcessor. + """ + + @property + def sentry_meta(self): + # type: (Union[AbstractSpan, Span, ReadableSpan]) -> dict[str, Any] + if not getattr(self, "_sentry_meta", None): + self._sentry_meta = {} + return self._sentry_meta + + AbstractSpan.sentry_meta = sentry_meta + ReadableSpan.sentry_meta = sentry_meta + + old_readable_span = Span._readable_span + + def sentry_patched_readable_span(self): + # type: (Span) -> ReadableSpan + readable_span = old_readable_span(self) + readable_span._sentry_meta = self._sentry_meta + return readable_span + + Span._readable_span = sentry_patched_readable_span + + def _setup_sentry_tracing(): # type: () -> None import opentelemetry.context diff --git a/sentry_sdk/integrations/opentelemetry/potel_span_processor.py b/sentry_sdk/integrations/opentelemetry/potel_span_processor.py index ed31608516..a254aada05 100644 --- a/sentry_sdk/integrations/opentelemetry/potel_span_processor.py +++ b/sentry_sdk/integrations/opentelemetry/potel_span_processor.py @@ -6,7 +6,7 @@ format_span_id, get_current_span, INVALID_SPAN, - Span as TraceApiSpan, + Span as AbstractSpan, ) from opentelemetry.context import Context from opentelemetry.sdk.trace import Span, ReadableSpan, SpanProcessor @@ -78,19 +78,19 @@ def force_flush(self, timeout_millis=30000): return True def _add_root_span(self, span, parent_span): - # type: (Span, TraceApiSpan) -> None + # type: (Span, AbstractSpan) -> None """ This is required to make POTelSpan.root_span work since we can't traverse back to the root purely with otel efficiently. 
""" if parent_span != INVALID_SPAN and not parent_span.get_span_context().is_remote: # child span points to parent's root or parent - span._sentry_root_otel_span = getattr( - parent_span, "_sentry_root_otel_span", parent_span + span.sentry_meta["root_span"] = parent_span.sentry_meta.get( + "root_span", parent_span ) else: # root span points to itself - span._sentry_root_otel_span = span + span.sentry_meta["root_span"] = span def _flush_root_span(self, span): # type: (ReadableSpan) -> None diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index d4033b52ef..bdfa846257 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -1327,8 +1327,7 @@ def containing_transaction(self): def root_span(self): # type: () -> Optional[POTelSpan] root_otel_span = cast( - "Optional[OtelSpan]", - getattr(self._otel_span, "_sentry_root_otel_span", None), + "Optional[OtelSpan]", self._otel_span.sentry_meta.get("root_span", None) ) return POTelSpan(otel_span=root_otel_span) if root_otel_span else None From e72e62f616cbe156c6add3f39ccf4fb379e5f0fe Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Tue, 22 Oct 2024 15:50:13 +0200 Subject: [PATCH 070/244] Use root span transaction name in populated head DSC (#3677) --- .../integrations/opentelemetry/integration.py | 19 +------ .../opentelemetry/potel_span_processor.py | 9 ++-- .../integrations/opentelemetry/utils.py | 50 ++++++++++--------- sentry_sdk/tracing.py | 6 ++- 4 files changed, 38 insertions(+), 46 deletions(-) diff --git a/sentry_sdk/integrations/opentelemetry/integration.py b/sentry_sdk/integrations/opentelemetry/integration.py index 595fdfcb6e..944326a124 100644 --- a/sentry_sdk/integrations/opentelemetry/integration.py +++ b/sentry_sdk/integrations/opentelemetry/integration.py @@ -18,7 +18,6 @@ try: from opentelemetry import trace from opentelemetry.propagate import set_global_textmap - from opentelemetry.trace import Span as AbstractSpan from opentelemetry.sdk.trace import TracerProvider, Span, ReadableSpan except ImportError: raise DidNotEnable("opentelemetry not installed") @@ -28,11 +27,6 @@ except ImportError: DjangoInstrumentor = None -from sentry_sdk._types import TYPE_CHECKING - -if TYPE_CHECKING: - from typing import Union, Any - CONFIGURABLE_INSTRUMENTATIONS = { DjangoInstrumentor: {"is_sql_commentor_enabled": True}, @@ -63,23 +57,12 @@ def _patch_readable_span(): We need to pass through sentry specific metadata/objects from Span to ReadableSpan to work with them consistently in the SpanProcessor. 
""" - - @property - def sentry_meta(self): - # type: (Union[AbstractSpan, Span, ReadableSpan]) -> dict[str, Any] - if not getattr(self, "_sentry_meta", None): - self._sentry_meta = {} - return self._sentry_meta - - AbstractSpan.sentry_meta = sentry_meta - ReadableSpan.sentry_meta = sentry_meta - old_readable_span = Span._readable_span def sentry_patched_readable_span(self): # type: (Span) -> ReadableSpan readable_span = old_readable_span(self) - readable_span._sentry_meta = self._sentry_meta + readable_span._sentry_meta = getattr(self, "_sentry_meta", {}) return readable_span Span._readable_span = sentry_patched_readable_span diff --git a/sentry_sdk/integrations/opentelemetry/potel_span_processor.py b/sentry_sdk/integrations/opentelemetry/potel_span_processor.py index a254aada05..fb20c7abfe 100644 --- a/sentry_sdk/integrations/opentelemetry/potel_span_processor.py +++ b/sentry_sdk/integrations/opentelemetry/potel_span_processor.py @@ -20,6 +20,8 @@ extract_span_data, extract_transaction_name_source, get_trace_context, + get_sentry_meta, + set_sentry_meta, ) from sentry_sdk.integrations.opentelemetry.consts import ( OTEL_SENTRY_CONTEXT, @@ -85,12 +87,11 @@ def _add_root_span(self, span, parent_span): """ if parent_span != INVALID_SPAN and not parent_span.get_span_context().is_remote: # child span points to parent's root or parent - span.sentry_meta["root_span"] = parent_span.sentry_meta.get( - "root_span", parent_span - ) + parent_root_span = get_sentry_meta(parent_span, "root_span") + set_sentry_meta(span, "root_span", parent_root_span or parent_span) else: # root span points to itself - span.sentry_meta["root_span"] = span + set_sentry_meta(span, "root_span", span) def _flush_root_span(self, span): # type: (ReadableSpan) -> None diff --git a/sentry_sdk/integrations/opentelemetry/utils.py b/sentry_sdk/integrations/opentelemetry/utils.py index dc0fff7f8c..d274f4e887 100644 --- a/sentry_sdk/integrations/opentelemetry/utils.py +++ b/sentry_sdk/integrations/opentelemetry/utils.py @@ -5,7 +5,7 @@ from urllib3.util import parse_url as urlparse from urllib.parse import quote from opentelemetry.trace import ( - Span, + Span as AbstractSpan, SpanKind, StatusCode, format_trace_id, @@ -365,7 +365,7 @@ def has_incoming_trace(trace_state): def get_trace_state(span): - # type: (Union[Span, ReadableSpan]) -> TraceState + # type: (Union[AbstractSpan, ReadableSpan]) -> TraceState """ Get the existing trace_state with sentry items or populate it if we are the head SDK. @@ -404,27 +404,31 @@ def get_trace_state(span): Baggage.SENTRY_PREFIX + "public_key", Dsn(options["dsn"]).public_key ) - # we cannot access the root span in most cases here, so we HAVE to rely on the - # scopes to carry the correct transaction name/source. - # IDEALLY we will always move to using the isolation scope here - # but our integrations do all kinds of stuff with both isolation and current - # so I am keeping both for now as a best attempt solution till we get to a better state. 
- isolation_scope = sentry_sdk.get_isolation_scope() - current_scope = sentry_sdk.get_current_scope() - if ( - current_scope.transaction_name - and current_scope.transaction_source not in LOW_QUALITY_TRANSACTION_SOURCES - ): - trace_state = trace_state.update( - Baggage.SENTRY_PREFIX + "transaction", current_scope.transaction_name - ) - elif ( - isolation_scope.transaction_name - and isolation_scope.transaction_source - not in LOW_QUALITY_TRANSACTION_SOURCES - ): - trace_state = trace_state.update( - Baggage.SENTRY_PREFIX + "transaction", isolation_scope.transaction_name + root_span = get_sentry_meta(span, "root_span") + if root_span and isinstance(root_span, ReadableSpan): + transaction_name, transaction_source = extract_transaction_name_source( + root_span ) + if ( + transaction_name + and transaction_source not in LOW_QUALITY_TRANSACTION_SOURCES + ): + trace_state = trace_state.update( + Baggage.SENTRY_PREFIX + "transaction", transaction_name + ) + return trace_state + + +def get_sentry_meta(span, key): + # type: (Union[AbstractSpan, ReadableSpan], str) -> Any + sentry_meta = getattr(span, "_sentry_meta", None) + return sentry_meta.get(key) if sentry_meta else None + + +def set_sentry_meta(span, key, value): + # type: (Union[AbstractSpan, ReadableSpan], str, Any) -> None + sentry_meta = getattr(span, "_sentry_meta", {}) + sentry_meta[key] = value + span._sentry_meta = sentry_meta diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index bdfa846257..513368c823 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -1326,8 +1326,12 @@ def containing_transaction(self): @property def root_span(self): # type: () -> Optional[POTelSpan] + from sentry_sdk.integrations.opentelemetry.utils import ( + get_sentry_meta, + ) + root_otel_span = cast( - "Optional[OtelSpan]", self._otel_span.sentry_meta.get("root_span", None) + "Optional[OtelSpan]", get_sentry_meta(self._otel_span, "root_span") ) return POTelSpan(otel_span=root_otel_span) if root_otel_span else None From 949ff5bc0d2f68f457da89e1ce3e0b35abafbcc8 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Tue, 22 Oct 2024 16:56:54 +0200 Subject: [PATCH 071/244] Fix breadcrumbs in redis (#3680) This moves the creation of `redis` breadcrumbs from the `maybe_create_breadcrumbs_from_span` into the integrations. And as well fixes some span related tests by using `render_span_tree`. 
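
For context, a minimal sketch of how an integration is expected to combine the
two helpers introduced below; `record_command`, the `span` argument and the
example dictionaries are hypothetical, only `_update_span` and
`_create_breadcrumb` come from this patch:

    from sentry_sdk.integrations.redis.utils import _create_breadcrumb, _update_span

    def record_command(span, name):
        # keys listed in TAG_KEYS become span tags, everything else span data
        connection_data = {"db.system": "redis", "server.address": "localhost"}
        command_data = {"redis.command": name, "db.operation": name}
        _update_span(span, connection_data, command_data)
        # the breadcrumb keeps only the TAG_KEYS entries from the given dicts
        _create_breadcrumb(name, connection_data, command_data)
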
--- .../integrations/redis/_async_common.py | 44 ++++++---- sentry_sdk/integrations/redis/_sync_common.py | 42 +++++---- .../integrations/redis/modules/caches.py | 22 +++-- .../integrations/redis/modules/queries.py | 24 ++--- sentry_sdk/integrations/redis/rb.py | 8 +- sentry_sdk/integrations/redis/redis.py | 12 +-- .../integrations/redis/redis_cluster.py | 33 +++---- .../redis/redis_py_cluster_legacy.py | 8 +- sentry_sdk/integrations/redis/utils.py | 77 +++++++++++++--- sentry_sdk/tracing_utils.py | 9 +- tests/integrations/redis/test_redis.py | 76 +++++++++------- .../redis/test_redis_cache_module.py | 87 +++++++++---------- .../redis/test_redis_cache_module_async.py | 60 ++++++------- 13 files changed, 292 insertions(+), 210 deletions(-) diff --git a/sentry_sdk/integrations/redis/_async_common.py b/sentry_sdk/integrations/redis/_async_common.py index 196e85e74b..6a136fe29a 100644 --- a/sentry_sdk/integrations/redis/_async_common.py +++ b/sentry_sdk/integrations/redis/_async_common.py @@ -3,12 +3,14 @@ from sentry_sdk.integrations.redis.consts import SPAN_ORIGIN from sentry_sdk.integrations.redis.modules.caches import ( _compile_cache_span_properties, - _set_cache_data, + _get_cache_data, ) from sentry_sdk.integrations.redis.modules.queries import _compile_db_span_properties from sentry_sdk.integrations.redis.utils import ( - _set_client_data, - _set_pipeline_data, + _create_breadcrumb, + _get_client_data, + _get_pipeline_data, + _update_span, ) from sentry_sdk.tracing import Span from sentry_sdk.utils import capture_internal_exceptions @@ -23,9 +25,9 @@ def patch_redis_async_pipeline( - pipeline_cls, is_cluster, get_command_args_fn, set_db_data_fn + pipeline_cls, is_cluster, get_command_args_fn, get_db_data_fn ): - # type: (Union[type[Pipeline[Any]], type[ClusterPipeline[Any]]], bool, Any, Callable[[Span, Any], None]) -> None + # type: (Union[type[Pipeline[Any]], type[ClusterPipeline[Any]]], bool, Any, Callable[[Any], dict[str, Any]]) -> None old_execute = pipeline_cls.execute from sentry_sdk.integrations.redis import RedisIntegration @@ -41,22 +43,25 @@ async def _sentry_execute(self, *args, **kwargs): origin=SPAN_ORIGIN, ) as span: with capture_internal_exceptions(): - set_db_data_fn(span, self) - _set_pipeline_data( - span, - is_cluster, - get_command_args_fn, - False if is_cluster else self.is_transaction, - self._command_stack if is_cluster else self.command_stack, + span_data = get_db_data_fn(self) + pipeline_data = _get_pipeline_data( + is_cluster=is_cluster, + get_command_args_fn=get_command_args_fn, + is_transaction=False if is_cluster else self.is_transaction, + command_stack=( + self._command_stack if is_cluster else self.command_stack + ), ) + _update_span(span, span_data, pipeline_data) + _create_breadcrumb("redis.pipeline.execute", span_data, pipeline_data) return await old_execute(self, *args, **kwargs) pipeline_cls.execute = _sentry_execute # type: ignore -def patch_redis_async_client(cls, is_cluster, set_db_data_fn): - # type: (Union[type[StrictRedis[Any]], type[RedisCluster[Any]]], bool, Callable[[Span, Any], None]) -> None +def patch_redis_async_client(cls, is_cluster, get_db_data_fn): + # type: (Union[type[StrictRedis[Any]], type[RedisCluster[Any]]], bool, Callable[[Any], dict[str, Any]]) -> None old_execute_command = cls.execute_command from sentry_sdk.integrations.redis import RedisIntegration @@ -92,15 +97,20 @@ async def _sentry_execute_command(self, name, *args, **kwargs): ) db_span.__enter__() - set_db_data_fn(db_span, self) - _set_client_data(db_span, 
is_cluster, name, *args) + db_span_data = get_db_data_fn(self) + db_client_span_data = _get_client_data(is_cluster, name, *args) + _update_span(db_span, db_span_data, db_client_span_data) + _create_breadcrumb( + db_properties["description"], db_span_data, db_client_span_data + ) value = await old_execute_command(self, name, *args, **kwargs) db_span.__exit__(None, None, None) if cache_span: - _set_cache_data(cache_span, self, cache_properties, value) + cache_span_data = _get_cache_data(self, cache_properties, value) + _update_span(cache_span, cache_span_data) cache_span.__exit__(None, None, None) return value diff --git a/sentry_sdk/integrations/redis/_sync_common.py b/sentry_sdk/integrations/redis/_sync_common.py index ef10e9e4f0..f4cb6ee1b8 100644 --- a/sentry_sdk/integrations/redis/_sync_common.py +++ b/sentry_sdk/integrations/redis/_sync_common.py @@ -3,12 +3,14 @@ from sentry_sdk.integrations.redis.consts import SPAN_ORIGIN from sentry_sdk.integrations.redis.modules.caches import ( _compile_cache_span_properties, - _set_cache_data, + _get_cache_data, ) from sentry_sdk.integrations.redis.modules.queries import _compile_db_span_properties from sentry_sdk.integrations.redis.utils import ( - _set_client_data, - _set_pipeline_data, + _create_breadcrumb, + _get_client_data, + _get_pipeline_data, + _update_span, ) from sentry_sdk.tracing import Span from sentry_sdk.utils import capture_internal_exceptions @@ -24,9 +26,9 @@ def patch_redis_pipeline( pipeline_cls, is_cluster, get_command_args_fn, - set_db_data_fn, + get_db_data_fn, ): - # type: (Any, bool, Any, Callable[[Span, Any], None]) -> None + # type: (Any, bool, Any, Callable[[Any], dict[str, Any]]) -> None old_execute = pipeline_cls.execute from sentry_sdk.integrations.redis import RedisIntegration @@ -42,22 +44,23 @@ def sentry_patched_execute(self, *args, **kwargs): origin=SPAN_ORIGIN, ) as span: with capture_internal_exceptions(): - set_db_data_fn(span, self) - _set_pipeline_data( - span, - is_cluster, - get_command_args_fn, - False if is_cluster else self.transaction, - self.command_stack, + span_data = get_db_data_fn(self) + pipeline_data = _get_pipeline_data( + is_cluster=is_cluster, + get_command_args_fn=get_command_args_fn, + is_transaction=False if is_cluster else self.transaction, + command_stack=self.command_stack, ) + _update_span(span, span_data, pipeline_data) + _create_breadcrumb("redis.pipeline.execute", span_data, pipeline_data) return old_execute(self, *args, **kwargs) pipeline_cls.execute = sentry_patched_execute -def patch_redis_client(cls, is_cluster, set_db_data_fn): - # type: (Any, bool, Callable[[Span, Any], None]) -> None +def patch_redis_client(cls, is_cluster, get_db_data_fn): + # type: (Any, bool, Callable[[Any], dict[str, Any]]) -> None """ This function can be used to instrument custom redis client classes or subclasses. 
@@ -97,15 +100,20 @@ def sentry_patched_execute_command(self, name, *args, **kwargs): ) db_span.__enter__() - set_db_data_fn(db_span, self) - _set_client_data(db_span, is_cluster, name, *args) + db_span_data = get_db_data_fn(self) + db_client_span_data = _get_client_data(is_cluster, name, *args) + _update_span(db_span, db_span_data, db_client_span_data) + _create_breadcrumb( + db_properties["description"], db_span_data, db_client_span_data + ) value = old_execute_command(self, name, *args, **kwargs) db_span.__exit__(None, None, None) if cache_span: - _set_cache_data(cache_span, self, cache_properties, value) + cache_span_data = _get_cache_data(self, cache_properties, value) + _update_span(cache_span, cache_span_data) cache_span.__exit__(None, None, None) return value diff --git a/sentry_sdk/integrations/redis/modules/caches.py b/sentry_sdk/integrations/redis/modules/caches.py index c6fc19f5b2..d93e729f2b 100644 --- a/sentry_sdk/integrations/redis/modules/caches.py +++ b/sentry_sdk/integrations/redis/modules/caches.py @@ -75,22 +75,24 @@ def _get_cache_span_description(redis_command, args, kwargs, integration): return description -def _set_cache_data(span, redis_client, properties, return_value): - # type: (Span, Any, dict[str, Any], Optional[Any]) -> None +def _get_cache_data(redis_client, properties, return_value): + # type: (Any, dict[str, Any], Optional[Any]) -> dict[str, Any] + data = {} + with capture_internal_exceptions(): - span.set_data(SPANDATA.CACHE_KEY, properties["key"]) + data[SPANDATA.CACHE_KEY] = properties["key"] if properties["redis_command"] in GET_COMMANDS: if return_value is not None: - span.set_data(SPANDATA.CACHE_HIT, True) + data[SPANDATA.CACHE_HIT] = True size = ( len(str(return_value).encode("utf-8")) if not isinstance(return_value, bytes) else len(return_value) ) - span.set_data(SPANDATA.CACHE_ITEM_SIZE, size) + data[SPANDATA.CACHE_ITEM_SIZE] = size else: - span.set_data(SPANDATA.CACHE_HIT, False) + data[SPANDATA.CACHE_HIT] = False elif properties["redis_command"] in SET_COMMANDS: if properties["value"] is not None: @@ -99,7 +101,7 @@ def _set_cache_data(span, redis_client, properties, return_value): if not isinstance(properties["value"], bytes) else len(properties["value"]) ) - span.set_data(SPANDATA.CACHE_ITEM_SIZE, size) + data[SPANDATA.CACHE_ITEM_SIZE] = size try: connection_params = redis_client.connection_pool.connection_kwargs @@ -114,8 +116,10 @@ def _set_cache_data(span, redis_client, properties, return_value): host = connection_params.get("host") if host is not None: - span.set_data(SPANDATA.NETWORK_PEER_ADDRESS, host) + data[SPANDATA.NETWORK_PEER_ADDRESS] = host port = connection_params.get("port") if port is not None: - span.set_data(SPANDATA.NETWORK_PEER_PORT, port) + data[SPANDATA.NETWORK_PEER_PORT] = port + + return data diff --git a/sentry_sdk/integrations/redis/modules/queries.py b/sentry_sdk/integrations/redis/modules/queries.py index e0d85a4ef7..e2189b7f9c 100644 --- a/sentry_sdk/integrations/redis/modules/queries.py +++ b/sentry_sdk/integrations/redis/modules/queries.py @@ -43,26 +43,30 @@ def _get_db_span_description(integration, command_name, args): return description -def _set_db_data_on_span(span, connection_params): - # type: (Span, dict[str, Any]) -> None - span.set_data(SPANDATA.DB_SYSTEM, "redis") +def _get_connection_data(connection_params): + # type: (dict[str, Any]) -> dict[str, Any] + data = { + SPANDATA.DB_SYSTEM: "redis", + } db = connection_params.get("db") if db is not None: - span.set_data(SPANDATA.DB_NAME, str(db)) + 
data[SPANDATA.DB_NAME] = str(db) host = connection_params.get("host") if host is not None: - span.set_data(SPANDATA.SERVER_ADDRESS, host) + data[SPANDATA.SERVER_ADDRESS] = host port = connection_params.get("port") if port is not None: - span.set_data(SPANDATA.SERVER_PORT, port) + data[SPANDATA.SERVER_PORT] = port + + return data -def _set_db_data(span, redis_instance): - # type: (Span, Redis[Any]) -> None +def _get_db_data(redis_instance): + # type: (Redis[Any]) -> dict[str, Any] try: - _set_db_data_on_span(span, redis_instance.connection_pool.connection_kwargs) + return _get_connection_data(redis_instance.connection_pool.connection_kwargs) except AttributeError: - pass # connections_kwargs may be missing in some cases + return {} # connections_kwargs may be missing in some cases diff --git a/sentry_sdk/integrations/redis/rb.py b/sentry_sdk/integrations/redis/rb.py index 1b3e2e530c..68d3c3a9d6 100644 --- a/sentry_sdk/integrations/redis/rb.py +++ b/sentry_sdk/integrations/redis/rb.py @@ -5,7 +5,7 @@ """ from sentry_sdk.integrations.redis._sync_common import patch_redis_client -from sentry_sdk.integrations.redis.modules.queries import _set_db_data +from sentry_sdk.integrations.redis.modules.queries import _get_db_data def _patch_rb(): @@ -18,15 +18,15 @@ def _patch_rb(): patch_redis_client( rb.clients.FanoutClient, is_cluster=False, - set_db_data_fn=_set_db_data, + get_db_data_fn=_get_db_data, ) patch_redis_client( rb.clients.MappingClient, is_cluster=False, - set_db_data_fn=_set_db_data, + get_db_data_fn=_get_db_data, ) patch_redis_client( rb.clients.RoutingClient, is_cluster=False, - set_db_data_fn=_set_db_data, + get_db_data_fn=_get_db_data, ) diff --git a/sentry_sdk/integrations/redis/redis.py b/sentry_sdk/integrations/redis/redis.py index c92958a32d..935a828c3d 100644 --- a/sentry_sdk/integrations/redis/redis.py +++ b/sentry_sdk/integrations/redis/redis.py @@ -8,7 +8,7 @@ patch_redis_client, patch_redis_pipeline, ) -from sentry_sdk.integrations.redis.modules.queries import _set_db_data +from sentry_sdk.integrations.redis.modules.queries import _get_db_data from typing import TYPE_CHECKING @@ -26,13 +26,13 @@ def _patch_redis(StrictRedis, client): # noqa: N803 patch_redis_client( StrictRedis, is_cluster=False, - set_db_data_fn=_set_db_data, + get_db_data_fn=_get_db_data, ) patch_redis_pipeline( client.Pipeline, is_cluster=False, get_command_args_fn=_get_redis_command_args, - set_db_data_fn=_set_db_data, + get_db_data_fn=_get_db_data, ) try: strict_pipeline = client.StrictPipeline @@ -43,7 +43,7 @@ def _patch_redis(StrictRedis, client): # noqa: N803 strict_pipeline, is_cluster=False, get_command_args_fn=_get_redis_command_args, - set_db_data_fn=_set_db_data, + get_db_data_fn=_get_db_data, ) try: @@ -59,11 +59,11 @@ def _patch_redis(StrictRedis, client): # noqa: N803 patch_redis_async_client( redis.asyncio.client.StrictRedis, is_cluster=False, - set_db_data_fn=_set_db_data, + get_db_data_fn=_get_db_data, ) patch_redis_async_pipeline( redis.asyncio.client.Pipeline, False, _get_redis_command_args, - set_db_data_fn=_set_db_data, + get_db_data_fn=_get_db_data, ) diff --git a/sentry_sdk/integrations/redis/redis_cluster.py b/sentry_sdk/integrations/redis/redis_cluster.py index 80cdc7235a..dbcd20a65d 100644 --- a/sentry_sdk/integrations/redis/redis_cluster.py +++ b/sentry_sdk/integrations/redis/redis_cluster.py @@ -9,7 +9,7 @@ patch_redis_client, patch_redis_pipeline, ) -from sentry_sdk.integrations.redis.modules.queries import _set_db_data_on_span +from 
sentry_sdk.integrations.redis.modules.queries import _get_connection_data from sentry_sdk.integrations.redis.utils import _parse_rediscluster_command from sentry_sdk.utils import capture_internal_exceptions @@ -26,26 +26,27 @@ from sentry_sdk.tracing import Span -def _set_async_cluster_db_data(span, async_redis_cluster_instance): - # type: (Span, AsyncRedisCluster[Any]) -> None +def _get_async_cluster_db_data(async_redis_cluster_instance): + # type: (AsyncRedisCluster[Any]) -> dict[str, Any] default_node = async_redis_cluster_instance.get_default_node() if default_node is not None and default_node.connection_kwargs is not None: - _set_db_data_on_span(span, default_node.connection_kwargs) + return _get_connection_data(default_node.connection_kwargs) + else: + return {} -def _set_async_cluster_pipeline_db_data(span, async_redis_cluster_pipeline_instance): - # type: (Span, AsyncClusterPipeline[Any]) -> None +def _get_async_cluster_pipeline_db_data(async_redis_cluster_pipeline_instance): + # type: (AsyncClusterPipeline[Any]) -> dict[str, Any] with capture_internal_exceptions(): - _set_async_cluster_db_data( - span, + return _get_async_cluster_db_data( # the AsyncClusterPipeline has always had a `_client` attr but it is private so potentially problematic and mypy # does not recognize it - see https://github.com/redis/redis-py/blame/v5.0.0/redis/asyncio/cluster.py#L1386 async_redis_cluster_pipeline_instance._client, # type: ignore[attr-defined] ) -def _set_cluster_db_data(span, redis_cluster_instance): - # type: (Span, RedisCluster[Any]) -> None +def _get_cluster_db_data(redis_cluster_instance): + # type: (RedisCluster[Any]) -> dict[str, Any] default_node = redis_cluster_instance.get_default_node() if default_node is not None: @@ -53,7 +54,9 @@ def _set_cluster_db_data(span, redis_cluster_instance): "host": default_node.host, "port": default_node.port, } - _set_db_data_on_span(span, connection_params) + return _get_connection_data(connection_params) + else: + return {} def _patch_redis_cluster(): @@ -67,13 +70,13 @@ def _patch_redis_cluster(): patch_redis_client( RedisCluster, is_cluster=True, - set_db_data_fn=_set_cluster_db_data, + get_db_data_fn=_get_cluster_db_data, ) patch_redis_pipeline( cluster.ClusterPipeline, is_cluster=True, get_command_args_fn=_parse_rediscluster_command, - set_db_data_fn=_set_cluster_db_data, + get_db_data_fn=_get_cluster_db_data, ) try: @@ -89,11 +92,11 @@ def _patch_redis_cluster(): patch_redis_async_client( async_cluster.RedisCluster, is_cluster=True, - set_db_data_fn=_set_async_cluster_db_data, + get_db_data_fn=_get_async_cluster_db_data, ) patch_redis_async_pipeline( async_cluster.ClusterPipeline, is_cluster=True, get_command_args_fn=_parse_rediscluster_command, - set_db_data_fn=_set_async_cluster_pipeline_db_data, + get_db_data_fn=_get_async_cluster_pipeline_db_data, ) diff --git a/sentry_sdk/integrations/redis/redis_py_cluster_legacy.py b/sentry_sdk/integrations/redis/redis_py_cluster_legacy.py index ad1c23633f..53b545c21b 100644 --- a/sentry_sdk/integrations/redis/redis_py_cluster_legacy.py +++ b/sentry_sdk/integrations/redis/redis_py_cluster_legacy.py @@ -9,7 +9,7 @@ patch_redis_client, patch_redis_pipeline, ) -from sentry_sdk.integrations.redis.modules.queries import _set_db_data +from sentry_sdk.integrations.redis.modules.queries import _get_db_data from sentry_sdk.integrations.redis.utils import _parse_rediscluster_command @@ -23,7 +23,7 @@ def _patch_rediscluster(): patch_redis_client( rediscluster.RedisCluster, is_cluster=True, - 
set_db_data_fn=_set_db_data,
+        get_db_data_fn=_get_db_data,
     )
 
     # up to v1.3.6, __version__ attribute is a tuple
@@ -37,7 +37,7 @@ def _patch_rediscluster():
         patch_redis_client(
             rediscluster.StrictRedisCluster,
             is_cluster=True,
-            set_db_data_fn=_set_db_data,
+            get_db_data_fn=_get_db_data,
         )
     else:
         pipeline_cls = rediscluster.pipeline.ClusterPipeline
@@ -46,5 +46,5 @@ def _patch_rediscluster():
         pipeline_cls,
         is_cluster=True,
         get_command_args_fn=_parse_rediscluster_command,
-        set_db_data_fn=_set_db_data,
+        get_db_data_fn=_get_db_data,
     )
diff --git a/sentry_sdk/integrations/redis/utils.py b/sentry_sdk/integrations/redis/utils.py
index 894c56305b..9eb16c5bc4 100644
--- a/sentry_sdk/integrations/redis/utils.py
+++ b/sentry_sdk/integrations/redis/utils.py
@@ -1,3 +1,4 @@
+import sentry_sdk
 from sentry_sdk.consts import SPANDATA
 from sentry_sdk.integrations.redis.consts import (
     _COMMANDS_INCLUDING_SENSITIVE_DATA,
@@ -16,6 +17,47 @@
     from sentry_sdk.tracing import Span
 
 
+TAG_KEYS = [
+    "redis.command",
+    "redis.is_cluster",
+    "redis.key",
+    "redis.transaction",
+    SPANDATA.DB_OPERATION,
+]
+
+
+def _update_span(span, *data_bags):
+    # type: (Span, *dict[str, Any]) -> None
+    """
+    Set tags and data on the given span to data from the given data bags.
+    """
+    for data in data_bags:
+        for key, value in data.items():
+            if key in TAG_KEYS:
+                span.set_tag(key, value)
+            else:
+                span.set_data(key, value)
+
+
+def _create_breadcrumb(message, *data_bags):
+    # type: (str, *dict[str, Any]) -> None
+    """
+    Create a breadcrumb containing the tags data from the given data bags.
+    """
+    data = {}
+    # use a separate loop variable so the accumulator `data` is not shadowed
+    for data_bag in data_bags:
+        for key, value in data_bag.items():
+            if key in TAG_KEYS:
+                data[key] = value
+
+    sentry_sdk.add_breadcrumb(
+        message=message,
+        type="redis",
+        category="redis",
+        data=data,
+    )
+
+
 def _get_safe_command(name, args):
     # type: (str, Sequence[Any]) -> str
     command_parts = [name]
@@ -105,12 +147,12 @@ def _parse_rediscluster_command(command):
     return command.args
 
 
-def _set_pipeline_data(
-    span, is_cluster, get_command_args_fn, is_transaction, command_stack
-):
-    # type: (Span, bool, Any, bool, Sequence[Any]) -> None
-    span.set_tag("redis.is_cluster", is_cluster)
-    span.set_tag("redis.transaction", is_transaction)
+def _get_pipeline_data(is_cluster, get_command_args_fn, is_transaction, command_stack):
+    # type: (bool, Any, bool, Sequence[Any]) -> dict[str, Any]
+    data = {
+        "redis.is_cluster": is_cluster,
+        "redis.transaction": is_transaction,
+    }  # type: dict[str, Any]
 
     commands = []
     for i, arg in enumerate(command_stack):
@@ -120,20 +162,27 @@
         command = get_command_args_fn(arg)
         commands.append(_get_safe_command(command[0], command[1:]))
 
-    span.set_data("redis.commands.count", len(command_stack))
-    span.set_data("redis.commands.first_ten", commands)
+    data["redis.commands.count"] = len(command_stack)
+    data["redis.commands.first_ten"] = commands
 
+    return data
+
+
+def _get_client_data(is_cluster, name, *args):
+    # type: (bool, str, *Any) -> dict[str, Any]
+    data = {
+        "redis.is_cluster": is_cluster,
+    }  # type: dict[str, Any]
 
-def _set_client_data(span, is_cluster, name, *args):
-    # type: (Span, bool, str, *Any) -> None
-    span.set_tag("redis.is_cluster", is_cluster)
     if name:
-        span.set_tag("redis.command", name)
-        span.set_tag(SPANDATA.DB_OPERATION, name)
+        data["redis.command"] = name
+        data[SPANDATA.DB_OPERATION] = name
 
     if name and args:
        name_low = name.lower()
         if (name_low in _SINGLE_KEY_COMMANDS) or (
            name_low in _MULTI_KEY_COMMANDS and len(args) == 1
         ):
-            
span.set_tag("redis.key", args[0]) + data["redis.key"] = args[0] + + return data diff --git a/sentry_sdk/tracing_utils.py b/sentry_sdk/tracing_utils.py index f063897cb9..2c6e7f2a7a 100644 --- a/sentry_sdk/tracing_utils.py +++ b/sentry_sdk/tracing_utils.py @@ -157,14 +157,7 @@ def record_sql_queries( def maybe_create_breadcrumbs_from_span(scope, span): # type: (sentry_sdk.Scope, sentry_sdk.tracing.Span) -> None - if span.op == OP.DB_REDIS: - scope.add_breadcrumb( - message=span.description, - type="redis", - category="redis", - data=span._tags, - ) - elif span.op == OP.HTTP_CLIENT: + if span.op == OP.HTTP_CLIENT: scope.add_breadcrumb( type="http", category="httplib", diff --git a/tests/integrations/redis/test_redis.py b/tests/integrations/redis/test_redis.py index aff5b3ec9b..f8225bed79 100644 --- a/tests/integrations/redis/test_redis.py +++ b/tests/integrations/redis/test_redis.py @@ -80,7 +80,7 @@ def test_redis_pipeline( } -def test_sensitive_data(sentry_init, capture_events): +def test_sensitive_data(sentry_init, capture_events, render_span_tree): # fakeredis does not support the AUTH command, so we need to mock it with mock.patch( "sentry_sdk.integrations.redis.utils._COMMANDS_INCLUDING_SENSITIVE_DATA", @@ -100,12 +100,16 @@ def test_sensitive_data(sentry_init, capture_events): ) # because fakeredis does not support AUTH we use GET instead (event,) = events - spans = event["spans"] - assert spans[0]["op"] == "db.redis" - assert spans[0]["description"] == "GET [Filtered]" + assert ( + render_span_tree(event) + == """\ +- op="": description=null + - op="db.redis": description="GET [Filtered]"\ +""" + ) -def test_pii_data_redacted(sentry_init, capture_events): +def test_pii_data_redacted(sentry_init, capture_events, render_span_tree): sentry_init( integrations=[RedisIntegration()], traces_sample_rate=1.0, @@ -120,15 +124,19 @@ def test_pii_data_redacted(sentry_init, capture_events): connection.delete("somekey1", "somekey2") (event,) = events - spans = event["spans"] - assert spans[0]["op"] == "db.redis" - assert spans[0]["description"] == "SET 'somekey1' [Filtered]" - assert spans[1]["description"] == "SET 'somekey2' [Filtered]" - assert spans[2]["description"] == "GET 'somekey2'" - assert spans[3]["description"] == "DEL 'somekey1' [Filtered]" + assert ( + render_span_tree(event) + == """\ +- op="": description=null + - op="db.redis": description="SET 'somekey1' [Filtered]" + - op="db.redis": description="SET 'somekey2' [Filtered]" + - op="db.redis": description="GET 'somekey2'" + - op="db.redis": description="DEL 'somekey1' [Filtered]"\ +""" + ) -def test_pii_data_sent(sentry_init, capture_events): +def test_pii_data_sent(sentry_init, capture_events, render_span_tree): sentry_init( integrations=[RedisIntegration()], traces_sample_rate=1.0, @@ -144,15 +152,19 @@ def test_pii_data_sent(sentry_init, capture_events): connection.delete("somekey1", "somekey2") (event,) = events - spans = event["spans"] - assert spans[0]["op"] == "db.redis" - assert spans[0]["description"] == "SET 'somekey1' 'my secret string1'" - assert spans[1]["description"] == "SET 'somekey2' 'my secret string2'" - assert spans[2]["description"] == "GET 'somekey2'" - assert spans[3]["description"] == "DEL 'somekey1' 'somekey2'" + assert ( + render_span_tree(event) + == """\ +- op="": description=null + - op="db.redis": description="SET 'somekey1' 'my secret string1'" + - op="db.redis": description="SET 'somekey2' 'my secret string2'" + - op="db.redis": description="GET 'somekey2'" + - op="db.redis": description="DEL 
'somekey1' 'somekey2'"\ +""" + ) -def test_data_truncation(sentry_init, capture_events): +def test_data_truncation(sentry_init, capture_events, render_span_tree): sentry_init( integrations=[RedisIntegration()], traces_sample_rate=1.0, @@ -168,15 +180,17 @@ def test_data_truncation(sentry_init, capture_events): connection.set("somekey2", short_string) (event,) = events - spans = event["spans"] - assert spans[0]["op"] == "db.redis" - assert spans[0]["description"] == "SET 'somekey1' '%s..." % ( - long_string[: 1024 - len("...") - len("SET 'somekey1' '")], + assert ( + render_span_tree(event) + == f"""\ +- op="": description=null + - op="db.redis": description="SET 'somekey1' '{long_string[: 1024 - len("...") - len("SET 'somekey1' '")]}..." + - op="db.redis": description="SET 'somekey2' 'bbbbbbbbbb'"\ +""" ) - assert spans[1]["description"] == "SET 'somekey2' '%s'" % (short_string,) -def test_data_truncation_custom(sentry_init, capture_events): +def test_data_truncation_custom(sentry_init, capture_events, render_span_tree): sentry_init( integrations=[RedisIntegration(max_data_size=30)], traces_sample_rate=1.0, @@ -192,12 +206,14 @@ def test_data_truncation_custom(sentry_init, capture_events): connection.set("somekey2", short_string) (event,) = events - spans = event["spans"] - assert spans[0]["op"] == "db.redis" - assert spans[0]["description"] == "SET 'somekey1' '%s..." % ( - long_string[: 30 - len("...") - len("SET 'somekey1' '")], + assert ( + render_span_tree(event) + == f"""\ +- op="": description=null + - op="db.redis": description="SET 'somekey1' '{long_string[: 30 - len("...") - len("SET 'somekey1' '")]}..." + - op="db.redis": description="SET 'somekey2' '{short_string}'"\ +""" ) - assert spans[1]["description"] == "SET 'somekey2' '%s'" % (short_string,) def test_breadcrumbs(sentry_init, capture_events): diff --git a/tests/integrations/redis/test_redis_cache_module.py b/tests/integrations/redis/test_redis_cache_module.py index f118aa53f5..68f915c2e5 100644 --- a/tests/integrations/redis/test_redis_cache_module.py +++ b/tests/integrations/redis/test_redis_cache_module.py @@ -14,7 +14,7 @@ FAKEREDIS_VERSION = parse_version(fakeredis.__version__) -def test_no_cache_basic(sentry_init, capture_events): +def test_no_cache_basic(sentry_init, capture_events, render_span_tree): sentry_init( integrations=[ RedisIntegration(), @@ -28,12 +28,16 @@ def test_no_cache_basic(sentry_init, capture_events): connection.get("mycachekey") (event,) = events - spans = event["spans"] - assert len(spans) == 1 - assert spans[0]["op"] == "db.redis" + assert ( + render_span_tree(event) + == """\ +- op="": description=null + - op="db.redis": description="GET 'mycachekey'"\ +""" + ) -def test_cache_basic(sentry_init, capture_events): +def test_cache_basic(sentry_init, capture_events, render_span_tree): sentry_init( integrations=[ RedisIntegration( @@ -53,31 +57,25 @@ def test_cache_basic(sentry_init, capture_events): connection.mget("mycachekey1", "mycachekey2") (event,) = events - spans = event["spans"] - assert len(spans) == 9 - - # no cache support for hget command - assert spans[0]["op"] == "db.redis" - assert spans[0]["tags"]["redis.command"] == "HGET" - - assert spans[1]["op"] == "cache.get" - assert spans[2]["op"] == "db.redis" - assert spans[2]["tags"]["redis.command"] == "GET" - - assert spans[3]["op"] == "cache.put" - assert spans[4]["op"] == "db.redis" - assert spans[4]["tags"]["redis.command"] == "SET" - - assert spans[5]["op"] == "cache.put" - assert spans[6]["op"] == "db.redis" - assert 
spans[6]["tags"]["redis.command"] == "SETEX" - - assert spans[7]["op"] == "cache.get" - assert spans[8]["op"] == "db.redis" - assert spans[8]["tags"]["redis.command"] == "MGET" + # no cache support for HGET command + assert ( + render_span_tree(event) + == """\ +- op="": description=null + - op="db.redis": description="HGET 'mycachekey' [Filtered]" + - op="cache.get": description="mycachekey" + - op="db.redis": description="GET 'mycachekey'" + - op="cache.put": description="mycachekey1" + - op="db.redis": description="SET 'mycachekey1' [Filtered]" + - op="cache.put": description="mycachekey2" + - op="db.redis": description="SETEX 'mycachekey2' [Filtered] [Filtered]" + - op="cache.get": description="mycachekey1, mycachekey2" + - op="db.redis": description="MGET 'mycachekey1' [Filtered]"\ +""" + ) -def test_cache_keys(sentry_init, capture_events): +def test_cache_keys(sentry_init, capture_events, render_span_tree): sentry_init( integrations=[ RedisIntegration( @@ -96,23 +94,18 @@ def test_cache_keys(sentry_init, capture_events): connection.get("bl") (event,) = events - spans = event["spans"] - assert len(spans) == 6 - assert spans[0]["op"] == "db.redis" - assert spans[0]["description"] == "GET 'somethingelse'" - - assert spans[1]["op"] == "cache.get" - assert spans[1]["description"] == "blub" - assert spans[2]["op"] == "db.redis" - assert spans[2]["description"] == "GET 'blub'" - - assert spans[3]["op"] == "cache.get" - assert spans[3]["description"] == "blubkeything" - assert spans[4]["op"] == "db.redis" - assert spans[4]["description"] == "GET 'blubkeything'" - - assert spans[5]["op"] == "db.redis" - assert spans[5]["description"] == "GET 'bl'" + assert ( + render_span_tree(event) + == """\ +- op="": description=null + - op="db.redis": description="GET 'somethingelse'" + - op="cache.get": description="blub" + - op="db.redis": description="GET 'blub'" + - op="cache.get": description="blubkeything" + - op="db.redis": description="GET 'blubkeything'" + - op="db.redis": description="GET 'bl'"\ +""" + ) def test_cache_data(sentry_init, capture_events): @@ -133,7 +126,7 @@ def test_cache_data(sentry_init, capture_events): connection.get("mycachekey") (event,) = events - spans = event["spans"] + spans = sorted(event["spans"], key=lambda x: x["start_timestamp"]) assert len(spans) == 6 @@ -222,7 +215,7 @@ def test_cache_prefixes(sentry_init, capture_events): (event,) = events - spans = event["spans"] + spans = sorted(event["spans"], key=lambda x: x["start_timestamp"]) assert len(spans) == 13 # 8 db spans + 5 cache spans cache_spans = [span for span in spans if span["op"] == "cache.get"] diff --git a/tests/integrations/redis/test_redis_cache_module_async.py b/tests/integrations/redis/test_redis_cache_module_async.py index d607f92fbd..a6ea06a973 100644 --- a/tests/integrations/redis/test_redis_cache_module_async.py +++ b/tests/integrations/redis/test_redis_cache_module_async.py @@ -21,7 +21,7 @@ @pytest.mark.asyncio -async def test_no_cache_basic(sentry_init, capture_events): +async def test_no_cache_basic(sentry_init, capture_events, render_span_tree): sentry_init( integrations=[ RedisIntegration(), @@ -35,13 +35,17 @@ async def test_no_cache_basic(sentry_init, capture_events): await connection.get("myasynccachekey") (event,) = events - spans = event["spans"] - assert len(spans) == 1 - assert spans[0]["op"] == "db.redis" + assert ( + render_span_tree(event) + == """\ +- op="": description=null + - op="db.redis": description="GET 'myasynccachekey'"\ +""" + ) @pytest.mark.asyncio -async def 
test_cache_basic(sentry_init, capture_events): +async def test_cache_basic(sentry_init, capture_events, render_span_tree): sentry_init( integrations=[ RedisIntegration( @@ -57,15 +61,18 @@ async def test_cache_basic(sentry_init, capture_events): await connection.get("myasynccachekey") (event,) = events - spans = event["spans"] - assert len(spans) == 2 - - assert spans[0]["op"] == "cache.get" - assert spans[1]["op"] == "db.redis" + assert ( + render_span_tree(event) + == """\ +- op="": description=null + - op="cache.get": description="myasynccachekey" + - op="db.redis": description="GET 'myasynccachekey'"\ +""" + ) @pytest.mark.asyncio -async def test_cache_keys(sentry_init, capture_events): +async def test_cache_keys(sentry_init, capture_events, render_span_tree): sentry_init( integrations=[ RedisIntegration( @@ -84,23 +91,18 @@ async def test_cache_keys(sentry_init, capture_events): await connection.get("abl") (event,) = events - spans = event["spans"] - assert len(spans) == 6 - assert spans[0]["op"] == "db.redis" - assert spans[0]["description"] == "GET 'asomethingelse'" - - assert spans[1]["op"] == "cache.get" - assert spans[1]["description"] == "ablub" - assert spans[2]["op"] == "db.redis" - assert spans[2]["description"] == "GET 'ablub'" - - assert spans[3]["op"] == "cache.get" - assert spans[3]["description"] == "ablubkeything" - assert spans[4]["op"] == "db.redis" - assert spans[4]["description"] == "GET 'ablubkeything'" - - assert spans[5]["op"] == "db.redis" - assert spans[5]["description"] == "GET 'abl'" + assert ( + render_span_tree(event) + == """\ +- op="": description=null + - op="db.redis": description="GET 'asomethingelse'" + - op="cache.get": description="ablub" + - op="db.redis": description="GET 'ablub'" + - op="cache.get": description="ablubkeything" + - op="db.redis": description="GET 'ablubkeything'" + - op="db.redis": description="GET 'abl'"\ +""" + ) @pytest.mark.asyncio @@ -122,7 +124,7 @@ async def test_cache_data(sentry_init, capture_events): await connection.get("myasynccachekey") (event,) = events - spans = event["spans"] + spans = sorted(event["spans"], key=lambda x: x["start_timestamp"]) assert len(spans) == 6 From 4179751efee9569210c4445608204ea4adb962fe Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Wed, 23 Oct 2024 11:25:28 +0200 Subject: [PATCH 072/244] Enable back pressure monitor in Potel (#3602) --- .../opentelemetry/potel_span_processor.py | 7 +++-- .../integrations/opentelemetry/sampler.py | 29 ++++++++++++++--- tests/test_monitor.py | 31 ++++++++++++++++--- 3 files changed, 55 insertions(+), 12 deletions(-) diff --git a/sentry_sdk/integrations/opentelemetry/potel_span_processor.py b/sentry_sdk/integrations/opentelemetry/potel_span_processor.py index fb20c7abfe..383234cba4 100644 --- a/sentry_sdk/integrations/opentelemetry/potel_span_processor.py +++ b/sentry_sdk/integrations/opentelemetry/potel_span_processor.py @@ -62,11 +62,12 @@ def on_end(self, span): if is_sentry_span(span): return - if span.parent and not span.parent.is_remote: - self._children_spans[span.parent.span_id].append(span) - else: + is_root_span = not span.parent or span.parent.is_remote + if is_root_span: # if have a root span ending, we build a transaction and send it self._flush_root_span(span) + else: + self._children_spans[span.parent.span_id].append(span) # TODO-neel-potel not sure we need a clear like JS def shutdown(self): diff --git a/sentry_sdk/integrations/opentelemetry/sampler.py b/sentry_sdk/integrations/opentelemetry/sampler.py index 8ffad41b86..b5c500b3f3 100644 
--- a/sentry_sdk/integrations/opentelemetry/sampler.py +++ b/sentry_sdk/integrations/opentelemetry/sampler.py @@ -1,5 +1,5 @@ +import random from typing import cast -from random import random from opentelemetry import trace @@ -46,10 +46,10 @@ def get_parent_sampled(parent_context, trace_id): return None -def dropped_result(span_context, attributes, sample_rate=None): +def dropped_result(parent_span_context, attributes, sample_rate=None): # type: (SpanContext, Attributes, Optional[float]) -> SamplingResult # these will only be added the first time in a root span sampling decision - trace_state = span_context.trace_state + trace_state = parent_span_context.trace_state if TRACESTATE_SAMPLED_KEY not in trace_state: trace_state = trace_state.add(TRACESTATE_SAMPLED_KEY, "false") @@ -57,6 +57,22 @@ def dropped_result(span_context, attributes, sample_rate=None): if sample_rate and TRACESTATE_SAMPLE_RATE_KEY not in trace_state: trace_state = trace_state.add(TRACESTATE_SAMPLE_RATE_KEY, str(sample_rate)) + is_root_span = not ( + parent_span_context.is_valid and not parent_span_context.is_remote + ) + if is_root_span: + # Tell Sentry why we dropped the transaction/root-span + client = sentry_sdk.get_client() + if client.monitor and client.monitor.downsample_factor > 0: + reason = "backpressure" + else: + reason = "sample_rate" + + client.transport.record_lost_event(reason, data_category="transaction") + + # Only one span (the transaction itself) is discarded, since we did not record any spans here. + client.transport.record_lost_event(reason, data_category="span") + return SamplingResult( Decision.DROP, attributes=attributes, @@ -136,9 +152,14 @@ def should_sample( ) return dropped_result(parent_span_context, attributes) + # Down-sample in case of back pressure monitor says so + # TODO: this should only be done for transactions (aka root spans) + if client.monitor: + sample_rate /= 2**client.monitor.downsample_factor + # Roll the dice on sample rate sample_rate = float(cast("Union[bool, float, int]", sample_rate)) - sampled = random() < sample_rate + sampled = random.random() < sample_rate if sampled: return sampled_result(parent_span_context, attributes, sample_rate) diff --git a/tests/test_monitor.py b/tests/test_monitor.py index 03e415b5cc..041169d515 100644 --- a/tests/test_monitor.py +++ b/tests/test_monitor.py @@ -55,14 +55,16 @@ def test_monitor_unhealthy(sentry_init): assert monitor.downsample_factor == (i + 1 if i < 10 else 10) -def test_transaction_uses_downsampled_rate( - sentry_init, capture_record_lost_event_calls, monkeypatch +def test_transaction_uses_downsample_rate( + sentry_init, capture_envelopes, capture_record_lost_event_calls, monkeypatch ): sentry_init( traces_sample_rate=1.0, transport=UnhealthyTestTransport(), ) + envelopes = capture_envelopes() + record_lost_event_calls = capture_record_lost_event_calls() monitor = sentry_sdk.get_client().monitor @@ -77,13 +79,32 @@ def test_transaction_uses_downsampled_rate( assert monitor.downsample_factor == 1 with sentry_sdk.start_transaction(name="foobar") as transaction: + with sentry_sdk.start_span(name="foospan"): + with sentry_sdk.start_span(name="foospan2"): + with sentry_sdk.start_span(name="foospan3"): + ... 
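# Editor's note (a sketch of the arithmetic behind the assertions below, not
# part of the original patch): with traces_sample_rate=1.0 and the monitor
# reporting downsample_factor == 1 as asserted above, the sampler change in
# this commit divides the configured rate by 2**downsample_factor, giving an
# effective rate of 1.0 / 2**1 == 0.5. The test then expects the root span to
# be dropped and reported as one lost "transaction" plus one lost "span" with
# reason "backpressure".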
+ assert transaction.sampled is False - assert transaction.sample_rate == 0.5 + assert ( + transaction.sample_rate == 0.5 + ) # TODO: this fails until we put the sample_rate in the POTelSpan + + assert len(envelopes) == 0 assert Counter(record_lost_event_calls) == Counter( [ - ("backpressure", "transaction", None, 1), - ("backpressure", "span", None, 1), + ( + "backpressure", + "transaction", + None, + 1, + ), + ( + "backpressure", + "span", + None, + 1, + ), # Only one span (the transaction itself) is counted, since we did not record any spans in the first place. ] ) From f02c204b2b1effe892f51ab2a7a2de6d496e189c Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Wed, 23 Oct 2024 13:31:07 +0200 Subject: [PATCH 073/244] Fix asyncpg breadcrumbs (#3685) --- sentry_sdk/integrations/asyncpg.py | 79 +++++++++++----------- tests/integrations/asyncpg/test_asyncpg.py | 20 ++---- 2 files changed, 45 insertions(+), 54 deletions(-) diff --git a/sentry_sdk/integrations/asyncpg.py b/sentry_sdk/integrations/asyncpg.py index c1f2557a20..1113fd199c 100644 --- a/sentry_sdk/integrations/asyncpg.py +++ b/sentry_sdk/integrations/asyncpg.py @@ -1,6 +1,6 @@ from __future__ import annotations import contextlib -from typing import Any, TypeVar, Callable, Awaitable, Iterator +from typing import Any, TypeVar, Callable, Awaitable, Iterator, Optional import sentry_sdk from sentry_sdk.consts import OP, SPANDATA @@ -21,6 +21,7 @@ except ImportError: raise DidNotEnable("asyncpg not installed.") + # asyncpg.__version__ is a string containing the semantic version in the form of ".." asyncpg_version = parse_version(asyncpg.__version__) @@ -123,10 +124,13 @@ def _wrap_connection_method( async def _inner(*args: Any, **kwargs: Any) -> T: if sentry_sdk.get_client().get_integration(AsyncPGIntegration) is None: return await f(*args, **kwargs) + query = args[1] params_list = args[2] if len(args) > 2 else None + with _record(None, query, params_list, executemany=executemany) as span: - _set_db_data(span, args[0]) + data = _get_db_data(conn=args[0]) + _set_on_span(span, data) res = await f(*args, **kwargs) return res @@ -146,7 +150,8 @@ def _inner(*args: Any, **kwargs: Any) -> T: # noqa: N807 params_list, executemany=False, ) as span: - _set_db_data(span, args[0]) + data = _get_db_data(conn=args[0]) + _set_on_span(span, data) res = f(*args, **kwargs) span.set_attribute("db.cursor", _serialize_span_attribute(res)) @@ -160,41 +165,19 @@ async def _inner(*args: Any, **kwargs: Any) -> T: if sentry_sdk.get_client().get_integration(AsyncPGIntegration) is None: return await f(*args, **kwargs) - user = kwargs["params"].user - database = kwargs["params"].database - with sentry_sdk.start_span( op=OP.DB, name="connect", origin=AsyncPGIntegration.origin, ) as span: - span.set_attribute(SPANDATA.DB_SYSTEM, "postgresql") - addr = kwargs.get("addr") - if addr: - try: - span.set_attribute(SPANDATA.SERVER_ADDRESS, addr[0]) - span.set_attribute(SPANDATA.SERVER_PORT, addr[1]) - except IndexError: - pass - - span.set_attribute(SPANDATA.DB_NAME, database) - span.set_attribute(SPANDATA.DB_USER, user) + data = _get_db_data( + addr=kwargs.get("addr"), + database=kwargs["params"].database, + user=kwargs["params"].user, + ) + _set_on_span(span, data) with capture_internal_exceptions(): - data = {} - for attr in ( - "db.cursor", - "db.params", - "db.paramstyle", - SPANDATA.DB_NAME, - SPANDATA.DB_SYSTEM, - SPANDATA.DB_USER, - SPANDATA.SERVER_ADDRESS, - SPANDATA.SERVER_PORT, - ): - if span.get_attribute(attr): - data[attr] = span.get_attribute(attr) - 
sentry_sdk.add_breadcrumb( message="connect", category="query", data=data ) @@ -206,21 +189,37 @@ async def _inner(*args: Any, **kwargs: Any) -> T: return _inner -def _set_db_data(span: Span, conn: Any) -> None: - span.set_attribute(SPANDATA.DB_SYSTEM, "postgresql") +def _get_db_data( + conn: Any = None, + addr: Optional[tuple[str]] = None, + database: Optional[str] = None, + user: Optional[str] = None, +) -> dict[str, str]: + if conn is not None: + addr = conn._addr + database = conn._params.database + user = conn._params.user + + data = { + SPANDATA.DB_SYSTEM: "postgresql", + } - addr = conn._addr if addr: try: - span.set_attribute(SPANDATA.SERVER_ADDRESS, addr[0]) - span.set_attribute(SPANDATA.SERVER_PORT, addr[1]) + data[SPANDATA.SERVER_ADDRESS] = addr[0] + data[SPANDATA.SERVER_PORT] = addr[1] except IndexError: pass - database = conn._params.database if database: - span.set_attribute(SPANDATA.DB_NAME, database) + data[SPANDATA.DB_NAME] = database - user = conn._params.user if user: - span.set_attribute(SPANDATA.DB_USER, user) + data[SPANDATA.DB_USER] = user + + return data + + +def _set_on_span(span: Span, data: dict[str, Any]): + for key, value in data.items(): + span.set_attribute(key, value) diff --git a/tests/integrations/asyncpg/test_asyncpg.py b/tests/integrations/asyncpg/test_asyncpg.py index adeef37d38..aae80770bf 100644 --- a/tests/integrations/asyncpg/test_asyncpg.py +++ b/tests/integrations/asyncpg/test_asyncpg.py @@ -74,7 +74,6 @@ async def _clean_pg(): async def test_connect(sentry_init, capture_events) -> None: sentry_init( integrations=[AsyncPGIntegration()], - traces_sample_rate=1.0, _experiments={"record_sql_params": True}, ) events = capture_events() @@ -97,7 +96,6 @@ async def test_connect(sentry_init, capture_events) -> None: async def test_execute(sentry_init, capture_events) -> None: sentry_init( integrations=[AsyncPGIntegration()], - traces_sample_rate=1.0, _experiments={"record_sql_params": True}, ) events = capture_events() @@ -163,7 +161,6 @@ async def test_execute(sentry_init, capture_events) -> None: async def test_execute_many(sentry_init, capture_events) -> None: sentry_init( integrations=[AsyncPGIntegration()], - traces_sample_rate=1.0, _experiments={"record_sql_params": True}, ) events = capture_events() @@ -202,7 +199,6 @@ async def test_execute_many(sentry_init, capture_events) -> None: async def test_record_params(sentry_init, capture_events) -> None: sentry_init( integrations=[AsyncPGIntegration(record_params=True)], - traces_sample_rate=1.0, _experiments={"record_sql_params": True}, ) events = capture_events() @@ -243,7 +239,6 @@ async def test_record_params(sentry_init, capture_events) -> None: async def test_cursor(sentry_init, capture_events) -> None: sentry_init( integrations=[AsyncPGIntegration()], - traces_sample_rate=1.0, _experiments={"record_sql_params": True}, ) events = capture_events() @@ -308,7 +303,6 @@ async def test_cursor(sentry_init, capture_events) -> None: async def test_cursor_manual(sentry_init, capture_events) -> None: sentry_init( integrations=[AsyncPGIntegration()], - traces_sample_rate=1.0, _experiments={"record_sql_params": True}, ) events = capture_events() @@ -375,7 +369,6 @@ async def test_cursor_manual(sentry_init, capture_events) -> None: async def test_prepared_stmt(sentry_init, capture_events) -> None: sentry_init( integrations=[AsyncPGIntegration()], - traces_sample_rate=1.0, _experiments={"record_sql_params": True}, ) events = capture_events() @@ -425,7 +418,6 @@ async def test_prepared_stmt(sentry_init, 
capture_events) -> None: async def test_connection_pool(sentry_init, capture_events) -> None: sentry_init( integrations=[AsyncPGIntegration()], - traces_sample_rate=1.0, _experiments={"record_sql_params": True}, ) events = capture_events() @@ -497,7 +489,7 @@ async def test_connection_pool(sentry_init, capture_events) -> None: async def test_query_source_disabled(sentry_init, capture_events): sentry_options = { "integrations": [AsyncPGIntegration()], - "enable_tracing": True, + "traces_sample_rate": 1.0, "enable_db_query_source": False, "db_query_source_threshold_ms": 0, } @@ -535,7 +527,7 @@ async def test_query_source_enabled( ): sentry_options = { "integrations": [AsyncPGIntegration()], - "enable_tracing": True, + "traces_sample_rate": 1.0, "db_query_source_threshold_ms": 0, } if enable_db_query_source is not None: @@ -571,7 +563,7 @@ async def test_query_source_enabled( async def test_query_source(sentry_init, capture_events): sentry_init( integrations=[AsyncPGIntegration()], - enable_tracing=True, + traces_sample_rate=1.0, enable_db_query_source=True, db_query_source_threshold_ms=0, ) @@ -621,7 +613,7 @@ async def test_query_source_with_module_in_search_path(sentry_init, capture_even """ sentry_init( integrations=[AsyncPGIntegration()], - enable_tracing=True, + traces_sample_rate=1.0, enable_db_query_source=True, db_query_source_threshold_ms=0, ) @@ -667,7 +659,7 @@ async def test_query_source_with_module_in_search_path(sentry_init, capture_even async def test_no_query_source_if_duration_too_short(sentry_init, capture_events): sentry_init( integrations=[AsyncPGIntegration()], - enable_tracing=True, + traces_sample_rate=1.0, enable_db_query_source=True, db_query_source_threshold_ms=100, ) @@ -714,7 +706,7 @@ def fake_record_sql_queries(*args, **kwargs): async def test_query_source_if_duration_over_threshold(sentry_init, capture_events): sentry_init( integrations=[AsyncPGIntegration()], - enable_tracing=True, + traces_sample_rate=1.0, enable_db_query_source=True, db_query_source_threshold_ms=100, ) From a2dff73f8c99395cc1c41535344fa54590b9fed2 Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Thu, 24 Oct 2024 12:47:32 +0200 Subject: [PATCH 074/244] Fix pymongo (#3688) --- MIGRATION_GUIDE.md | 3 ++ sentry_sdk/integrations/pymongo.py | 42 +++++++++------------ sentry_sdk/tracing.py | 17 ++++----- tests/integrations/pymongo/test_pymongo.py | 43 ++++++++++++---------- 4 files changed, 52 insertions(+), 53 deletions(-) diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index e64d13da4e..ccf2876375 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -10,6 +10,7 @@ Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of wh ### Changed - The SDK now supports Python 3.7 and higher. +- Transaction names can no longer contain commas and equals signs. If present, these characters will be stripped. - `sentry_sdk.start_span` now only takes keyword arguments. - `sentry_sdk.start_span` no longer takes an explicit `span` argument. - The `Span()` constructor does not accept a `hub` parameter anymore. @@ -24,6 +25,8 @@ Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of wh - Spans no longer have a `description`. Use `name` instead. - Dropped support for Python 3.6. +- The PyMongo integration no longer sets tags. The data is still accessible via span attributes. +- The PyMongo integration doesn't set `operation_ids` anymore. The individual IDs (`operation_id`, `request_id`, `session_id`) are now accessible as separate span attributes. 
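  A rough editor's sketch of what this means for code reading the span payload (the exact event layout may vary):

      # before (2.x): IDs grouped under a single data entry
      span["data"]["operation_ids"]["operation"]

      # after (3.x): individual span attributes
      span["data"]["operation_id"]
      span["data"]["request_id"]
      span["data"]["session_id"]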
- `sentry_sdk.metrics` and associated metrics APIs have been removed as Sentry no longer accepts metrics data in this form. See https://sentry.zendesk.com/hc/en-us/articles/26369339769883-Upcoming-API-Changes-to-Metrics - The experimental options `enable_metrics`, `before_emit_metric` and `metric_code_locations` have been removed. - When setting span status, the HTTP status code is no longer automatically added as a tag. diff --git a/sentry_sdk/integrations/pymongo.py b/sentry_sdk/integrations/pymongo.py index f65ad73687..f03f70606b 100644 --- a/sentry_sdk/integrations/pymongo.py +++ b/sentry_sdk/integrations/pymongo.py @@ -1,12 +1,11 @@ import copy -import json import sentry_sdk from sentry_sdk.consts import SPANSTATUS, SPANDATA, OP from sentry_sdk.integrations import DidNotEnable, Integration from sentry_sdk.scope import should_send_default_pii from sentry_sdk.tracing import Span -from sentry_sdk.utils import capture_internal_exceptions +from sentry_sdk.utils import capture_internal_exceptions, _serialize_span_attribute try: from pymongo import monitoring @@ -127,56 +126,49 @@ def started(self, event): command.pop("$clusterTime", None) command.pop("$signature", None) - tags = { - "db.name": event.database_name, + data = { + SPANDATA.DB_NAME: event.database_name, SPANDATA.DB_SYSTEM: "mongodb", SPANDATA.DB_OPERATION: event.command_name, SPANDATA.DB_MONGODB_COLLECTION: command.get(event.command_name), } try: - tags["net.peer.name"] = event.connection_id[0] - tags["net.peer.port"] = str(event.connection_id[1]) + data["net.peer.name"] = event.connection_id[0] + data["net.peer.port"] = str(event.connection_id[1]) except TypeError: pass - data = {"operation_ids": {}} # type: Dict[str, Any] - data["operation_ids"]["operation"] = event.operation_id - data["operation_ids"]["request"] = event.request_id - - data.update(_get_db_data(event)) - try: lsid = command.pop("lsid")["id"] - data["operation_ids"]["session"] = str(lsid) + data["session_id"] = str(lsid) except KeyError: pass if not should_send_default_pii(): command = _strip_pii(command) - query = json.dumps(command, default=str) + query = _serialize_span_attribute(command) span = sentry_sdk.start_span( op=OP.DB, name=query, origin=PyMongoIntegration.origin, ) - for tag, value in tags.items(): - # set the tag for backwards-compatibility. - # TODO: remove the set_tag call in the next major release! - span.set_tag(tag, value) - - span.set_data(tag, value) - - for key, value in data.items(): - span.set_data(key, value) - with capture_internal_exceptions(): sentry_sdk.add_breadcrumb( - message=query, category="query", type=OP.DB, data=tags + message=query, category="query", type=OP.DB, data=data ) + for key, value in data.items(): + span.set_attribute(key, value) + + for key, value in _get_db_data(event).items(): + span.set_attribute(key, value) + + span.set_attribute("operation_id", event.operation_id) + span.set_attribute("request_id", event.request_id) + self._ongoing_operations[self._operation_key(event)] = span.__enter__() def failed(self, event): diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 513368c823..9798b9c6ba 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -1189,11 +1189,6 @@ class POTelSpan: OTel span wrapper providing compatibility with the old span interface. """ - # XXX Maybe it makes sense to repurpose the existing Span class for this. - # For now I'm keeping this class separate to have a clean slate. 
- - # XXX The wrapper itself should have as little state as possible - def __init__( self, *, @@ -1225,14 +1220,14 @@ def __init__( # OTel timestamps have nanosecond precision start_timestamp = convert_to_otel_timestamp(start_timestamp) - self._otel_span = tracer.start_span( - name or description or op or "", start_time=start_timestamp - ) + span_name = self._sanitize_name(name or description or op or "") + self._otel_span = tracer.start_span(span_name, start_time=start_timestamp) self.origin = origin or DEFAULT_SPAN_ORIGIN self.op = op self.description = description - self.name = name + self.name = span_name + if status is not None: self.set_status(status) @@ -1602,6 +1597,10 @@ def set_context(self, key, value): self.set_attribute(f"{SentrySpanAttribute.CONTEXT}.{key}", value) + def _sanitize_name(self, name): + """No commas and equals allowed in tracestate.""" + return name.replace(",", "").replace("=", "") + if TYPE_CHECKING: diff --git a/tests/integrations/pymongo/test_pymongo.py b/tests/integrations/pymongo/test_pymongo.py index 80fe40fdcf..7ebfc1159c 100644 --- a/tests/integrations/pymongo/test_pymongo.py +++ b/tests/integrations/pymongo/test_pymongo.py @@ -1,3 +1,5 @@ +import re + from sentry_sdk import capture_message, start_transaction from sentry_sdk.consts import SPANDATA from sentry_sdk.integrations.pymongo import PyMongoIntegration, _strip_pii @@ -49,7 +51,7 @@ def test_transactions(sentry_init, capture_events, mongo_server, with_pii): (event,) = events (find, insert_success, insert_fail) = event["spans"] - common_tags = { + common_data = { "db.name": "test_db", "db.system": "mongodb", "net.peer.name": mongo_server.host, @@ -60,8 +62,7 @@ def test_transactions(sentry_init, capture_events, mongo_server, with_pii): assert span["data"][SPANDATA.DB_NAME] == "test_db" assert span["data"][SPANDATA.SERVER_ADDRESS] == "localhost" assert span["data"][SPANDATA.SERVER_PORT] == mongo_server.port - for field, value in common_tags.items(): - assert span["tags"][field] == value + for field, value in common_data.items(): assert span["data"][field] == value assert find["op"] == "db" @@ -69,22 +70,16 @@ def test_transactions(sentry_init, capture_events, mongo_server, with_pii): assert insert_fail["op"] == "db" assert find["data"]["db.operation"] == "find" - assert find["tags"]["db.operation"] == "find" assert insert_success["data"]["db.operation"] == "insert" - assert insert_success["tags"]["db.operation"] == "insert" assert insert_fail["data"]["db.operation"] == "insert" - assert insert_fail["tags"]["db.operation"] == "insert" assert find["description"].startswith('{"find') - assert insert_success["description"].startswith('{"insert') - assert insert_fail["description"].startswith('{"insert') + assert re.match("^{['\"]insert.*", insert_success["description"]) + assert re.match("^{['\"]insert.*", insert_fail["description"]) assert find["data"][SPANDATA.DB_MONGODB_COLLECTION] == "test_collection" - assert find["tags"][SPANDATA.DB_MONGODB_COLLECTION] == "test_collection" assert insert_success["data"][SPANDATA.DB_MONGODB_COLLECTION] == "test_collection" - assert insert_success["tags"][SPANDATA.DB_MONGODB_COLLECTION] == "test_collection" assert insert_fail["data"][SPANDATA.DB_MONGODB_COLLECTION] == "erroneous" - assert insert_fail["tags"][SPANDATA.DB_MONGODB_COLLECTION] == "erroneous" if with_pii: assert "1" in find["description"] assert "2" in insert_success["description"] @@ -99,16 +94,22 @@ def test_transactions(sentry_init, capture_events, mongo_server, with_pii): and "4" not in 
insert_fail["description"] ) - assert find["tags"]["status"] == "ok" - assert insert_success["tags"]["status"] == "ok" - assert insert_fail["tags"]["status"] == "internal_error" - -@pytest.mark.parametrize("with_pii", [False, True]) -def test_breadcrumbs(sentry_init, capture_events, mongo_server, with_pii): +@pytest.mark.parametrize( + "with_pii,traces_sample_rate", + [ + [False, 0.0], + [False, 1.0], + [True, 0.0], + [True, 1.0], + ], +) +def test_breadcrumbs( + sentry_init, capture_events, mongo_server, with_pii, traces_sample_rate +): sentry_init( integrations=[PyMongoIntegration()], - traces_sample_rate=1.0, + traces_sample_rate=traces_sample_rate, send_default_pii=with_pii, ) events = capture_events() @@ -120,7 +121,11 @@ def test_breadcrumbs(sentry_init, capture_events, mongo_server, with_pii): ) # force query execution capture_message("hi") - (event,) = events + if traces_sample_rate: + event = events[1] + else: + event = events[0] + (crumb,) = event["breadcrumbs"]["values"] assert crumb["category"] == "query" From 43ca6f37b46527fb3d8d8a8fd70f908ec2d5e552 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Thu, 24 Oct 2024 16:06:50 +0200 Subject: [PATCH 075/244] Fix breadcrumbs in HTTP clients (#3683) This moves the creation of breadcrumbs for outgoing HTTP requests from the `maybe_create_breadcrumbs_from_span` into the integrations. --- sentry_sdk/integrations/aiohttp.py | 25 +++++++++-- sentry_sdk/integrations/boto3.py | 32 ++++++++++++-- sentry_sdk/integrations/httpx.py | 44 +++++++++++++++---- .../integrations/redis/_async_common.py | 1 - sentry_sdk/integrations/redis/_sync_common.py | 1 - .../integrations/redis/modules/caches.py | 1 - .../integrations/redis/modules/queries.py | 1 - .../integrations/redis/redis_cluster.py | 1 - sentry_sdk/integrations/stdlib.py | 25 +++++++++-- sentry_sdk/tracing_utils.py | 8 +--- tests/integrations/boto3/test_s3.py | 31 +++++++++++++ tests/integrations/redis/test_redis.py | 4 +- 12 files changed, 141 insertions(+), 33 deletions(-) diff --git a/sentry_sdk/integrations/aiohttp.py b/sentry_sdk/integrations/aiohttp.py index 8edf921303..36c1d807d2 100644 --- a/sentry_sdk/integrations/aiohttp.py +++ b/sentry_sdk/integrations/aiohttp.py @@ -229,11 +229,17 @@ async def on_request_start(session, trace_config_ctx, params): % (method, parsed_url.url if parsed_url else SENSITIVE_DATA_SUBSTITUTE), origin=AioHttpIntegration.origin, ) - span.set_data(SPANDATA.HTTP_METHOD, method) + + data = { + SPANDATA.HTTP_METHOD: method, + } if parsed_url is not None: - span.set_data("url", parsed_url.url) - span.set_data(SPANDATA.HTTP_QUERY, parsed_url.query) - span.set_data(SPANDATA.HTTP_FRAGMENT, parsed_url.fragment) + data["url"] = parsed_url.url + data[SPANDATA.HTTP_QUERY] = parsed_url.query + data[SPANDATA.HTTP_FRAGMENT] = parsed_url.fragment + + for key, value in data.items(): + span.set_data(key, value) client = sentry_sdk.get_client() @@ -258,12 +264,23 @@ async def on_request_start(session, trace_config_ctx, params): params.headers[key] = value trace_config_ctx.span = span + trace_config_ctx.span_data = data async def on_request_end(session, trace_config_ctx, params): # type: (ClientSession, SimpleNamespace, TraceRequestEndParams) -> None if trace_config_ctx.span is None: return + span_data = trace_config_ctx.span_data or {} + span_data[SPANDATA.HTTP_STATUS_CODE] = int(params.response.status) + span_data["reason"] = params.response.reason + + sentry_sdk.add_breadcrumb( + type="http", + category="httplib", + data=span_data, + ) + span = trace_config_ctx.span 
span.set_http_status(int(params.response.status)) span.set_data("reason", params.response.reason) diff --git a/sentry_sdk/integrations/boto3.py b/sentry_sdk/integrations/boto3.py index 86201d9959..195b532e54 100644 --- a/sentry_sdk/integrations/boto3.py +++ b/sentry_sdk/integrations/boto3.py @@ -74,15 +74,20 @@ def _sentry_request_created(service_id, request, operation_name, **kwargs): origin=Boto3Integration.origin, ) + data = { + SPANDATA.HTTP_METHOD: request.method, + } with capture_internal_exceptions(): parsed_url = parse_url(request.url, sanitize=False) - span.set_data("aws.request.url", parsed_url.url) - span.set_data(SPANDATA.HTTP_QUERY, parsed_url.query) - span.set_data(SPANDATA.HTTP_FRAGMENT, parsed_url.fragment) + data["aws.request.url"] = parsed_url.url + data[SPANDATA.HTTP_QUERY] = parsed_url.query + data[SPANDATA.HTTP_FRAGMENT] = parsed_url.fragment + + for key, value in data.items(): + span.set_data(key, value) span.set_tag("aws.service_id", service_id) span.set_tag("aws.operation_name", operation_name) - span.set_data(SPANDATA.HTTP_METHOD, request.method) # We do it in order for subsequent http calls/retries be # attached to this span. @@ -91,6 +96,7 @@ def _sentry_request_created(service_id, request, operation_name, **kwargs): # request.context is an open-ended data-structure # where we can add anything useful in request life cycle. request.context["_sentrysdk_span"] = span + request.context["_sentrysdk_span_data"] = data def _sentry_after_call(context, parsed, **kwargs): @@ -100,6 +106,15 @@ def _sentry_after_call(context, parsed, **kwargs): # Span could be absent if the integration is disabled. if span is None: return + + span_data = context.pop("_sentrysdk_span_data", {}) + + sentry_sdk.add_breadcrumb( + type="http", + category="httplib", + data=span_data, + ) + span.__exit__(None, None, None) body = parsed.get("Body") @@ -143,4 +158,13 @@ def _sentry_after_call_error(context, exception, **kwargs): # Span could be absent if the integration is disabled. 
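# Editor's note (descriptive comment, not part of the original patch): the
# error path below now pops the same "_sentrysdk_span_data" stored in
# _sentry_request_created and emits the httplib breadcrumb as well, so failed
# AWS calls still leave a breadcrumb before the span is closed with the
# exception info.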
if span is None: return + + span_data = context.pop("_sentrysdk_span_data", {}) + + sentry_sdk.add_breadcrumb( + type="http", + category="httplib", + data=span_data, + ) + span.__exit__(type(exception), exception, None) diff --git a/sentry_sdk/integrations/httpx.py b/sentry_sdk/integrations/httpx.py index 6f80b93f4d..c4d8e4b4dc 100644 --- a/sentry_sdk/integrations/httpx.py +++ b/sentry_sdk/integrations/httpx.py @@ -60,11 +60,16 @@ def send(self, request, **kwargs): ), origin=HttpxIntegration.origin, ) as span: - span.set_data(SPANDATA.HTTP_METHOD, request.method) + data = { + SPANDATA.HTTP_METHOD: request.method, + } if parsed_url is not None: - span.set_data("url", parsed_url.url) - span.set_data(SPANDATA.HTTP_QUERY, parsed_url.query) - span.set_data(SPANDATA.HTTP_FRAGMENT, parsed_url.fragment) + data["url"] = parsed_url.url + data[SPANDATA.HTTP_QUERY] = parsed_url.query + data[SPANDATA.HTTP_FRAGMENT] = parsed_url.fragment + + for key, value in data.items(): + span.set_data(key, value) if should_propagate_trace(sentry_sdk.get_client(), str(request.url)): for ( @@ -89,6 +94,15 @@ def send(self, request, **kwargs): span.set_http_status(rv.status_code) span.set_data("reason", rv.reason_phrase) + data[SPANDATA.HTTP_STATUS_CODE] = rv.status_code + data["reason"] = rv.reason_phrase + + sentry_sdk.add_breadcrumb( + type="http", + category="httplib", + data=data, + ) + return rv Client.send = send @@ -116,11 +130,16 @@ async def send(self, request, **kwargs): ), origin=HttpxIntegration.origin, ) as span: - span.set_data(SPANDATA.HTTP_METHOD, request.method) + data = { + SPANDATA.HTTP_METHOD: request.method, + } if parsed_url is not None: - span.set_data("url", parsed_url.url) - span.set_data(SPANDATA.HTTP_QUERY, parsed_url.query) - span.set_data(SPANDATA.HTTP_FRAGMENT, parsed_url.fragment) + data["url"] = parsed_url.url + data[SPANDATA.HTTP_QUERY] = parsed_url.query + data[SPANDATA.HTTP_FRAGMENT] = parsed_url.fragment + + for key, value in data.items(): + span.set_data(key, value) if should_propagate_trace(sentry_sdk.get_client(), str(request.url)): for ( @@ -145,6 +164,15 @@ async def send(self, request, **kwargs): span.set_http_status(rv.status_code) span.set_data("reason", rv.reason_phrase) + data[SPANDATA.HTTP_STATUS_CODE] = rv.status_code + data["reason"] = rv.reason_phrase + + sentry_sdk.add_breadcrumb( + type="http", + category="httplib", + data=data, + ) + return rv AsyncClient.send = send diff --git a/sentry_sdk/integrations/redis/_async_common.py b/sentry_sdk/integrations/redis/_async_common.py index 6a136fe29a..f835affbf3 100644 --- a/sentry_sdk/integrations/redis/_async_common.py +++ b/sentry_sdk/integrations/redis/_async_common.py @@ -12,7 +12,6 @@ _get_pipeline_data, _update_span, ) -from sentry_sdk.tracing import Span from sentry_sdk.utils import capture_internal_exceptions from typing import TYPE_CHECKING diff --git a/sentry_sdk/integrations/redis/_sync_common.py b/sentry_sdk/integrations/redis/_sync_common.py index f4cb6ee1b8..9c96ad61d1 100644 --- a/sentry_sdk/integrations/redis/_sync_common.py +++ b/sentry_sdk/integrations/redis/_sync_common.py @@ -12,7 +12,6 @@ _get_pipeline_data, _update_span, ) -from sentry_sdk.tracing import Span from sentry_sdk.utils import capture_internal_exceptions from typing import TYPE_CHECKING diff --git a/sentry_sdk/integrations/redis/modules/caches.py b/sentry_sdk/integrations/redis/modules/caches.py index d93e729f2b..4ab33d2ea8 100644 --- a/sentry_sdk/integrations/redis/modules/caches.py +++ b/sentry_sdk/integrations/redis/modules/caches.py @@ 
-13,7 +13,6 @@ if TYPE_CHECKING: from sentry_sdk.integrations.redis import RedisIntegration - from sentry_sdk.tracing import Span from typing import Any, Optional diff --git a/sentry_sdk/integrations/redis/modules/queries.py b/sentry_sdk/integrations/redis/modules/queries.py index e2189b7f9c..c070893ac8 100644 --- a/sentry_sdk/integrations/redis/modules/queries.py +++ b/sentry_sdk/integrations/redis/modules/queries.py @@ -11,7 +11,6 @@ if TYPE_CHECKING: from redis import Redis from sentry_sdk.integrations.redis import RedisIntegration - from sentry_sdk.tracing import Span from typing import Any diff --git a/sentry_sdk/integrations/redis/redis_cluster.py b/sentry_sdk/integrations/redis/redis_cluster.py index dbcd20a65d..7975e21083 100644 --- a/sentry_sdk/integrations/redis/redis_cluster.py +++ b/sentry_sdk/integrations/redis/redis_cluster.py @@ -23,7 +23,6 @@ RedisCluster as AsyncRedisCluster, ClusterPipeline as AsyncClusterPipeline, ) - from sentry_sdk.tracing import Span def _get_async_cluster_db_data(async_redis_cluster_instance): diff --git a/sentry_sdk/integrations/stdlib.py b/sentry_sdk/integrations/stdlib.py index 8e038d6d3b..8d5ed06386 100644 --- a/sentry_sdk/integrations/stdlib.py +++ b/sentry_sdk/integrations/stdlib.py @@ -95,11 +95,17 @@ def putrequest(self, method, url, *args, **kwargs): % (method, parsed_url.url if parsed_url else SENSITIVE_DATA_SUBSTITUTE), origin="auto.http.stdlib.httplib", ) - span.set_data(SPANDATA.HTTP_METHOD, method) + + data = { + SPANDATA.HTTP_METHOD: method, + } if parsed_url is not None: - span.set_data("url", parsed_url.url) - span.set_data(SPANDATA.HTTP_QUERY, parsed_url.query) - span.set_data(SPANDATA.HTTP_FRAGMENT, parsed_url.fragment) + data["url"] = parsed_url.url + data[SPANDATA.HTTP_QUERY] = parsed_url.query + data[SPANDATA.HTTP_FRAGMENT] = parsed_url.fragment + + for key, value in data.items(): + span.set_data(key, value) rv = real_putrequest(self, method, url, *args, **kwargs) @@ -118,6 +124,7 @@ def putrequest(self, method, url, *args, **kwargs): self.putheader(key, value) self._sentrysdk_span = span # type: ignore[attr-defined] + self._sentrysdk_span_data = data # type: ignore[attr-defined] return rv @@ -130,6 +137,16 @@ def getresponse(self, *args, **kwargs): rv = real_getresponse(self, *args, **kwargs) + span_data = getattr(self, "_sentrysdk_span_data", {}) + span_data[SPANDATA.HTTP_STATUS_CODE] = int(rv.status) + span_data["reason"] = rv.reason + + sentry_sdk.add_breadcrumb( + type="http", + category="httplib", + data=span_data, + ) + span.set_http_status(int(rv.status)) span.set_data("reason", rv.reason) span.finish() diff --git a/sentry_sdk/tracing_utils.py b/sentry_sdk/tracing_utils.py index 2c6e7f2a7a..b9d4fb9335 100644 --- a/sentry_sdk/tracing_utils.py +++ b/sentry_sdk/tracing_utils.py @@ -157,12 +157,8 @@ def record_sql_queries( def maybe_create_breadcrumbs_from_span(scope, span): # type: (sentry_sdk.Scope, sentry_sdk.tracing.Span) -> None - if span.op == OP.HTTP_CLIENT: - scope.add_breadcrumb( - type="http", - category="httplib", - data=span._data, - ) + # TODO: can be removed when POtelSpan replaces Span + pass def _get_frame_module_abs_path(frame): diff --git a/tests/integrations/boto3/test_s3.py b/tests/integrations/boto3/test_s3.py index 97a1543b0f..668e8349b6 100644 --- a/tests/integrations/boto3/test_s3.py +++ b/tests/integrations/boto3/test_s3.py @@ -39,6 +39,37 @@ def test_basic(sentry_init, capture_events): assert span["description"] == "aws.s3.ListObjects" +def test_breadcrumb(sentry_init, capture_events): + 
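    # Editor's note (descriptive comment, not part of the original patch): this
    # new test asserts that the Boto3 integration records an httplib breadcrumb
    # whose data mirrors what _sentry_request_created collected for the request
    # (HTTP method, aws.request.url, query string and fragment).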
sentry_init(traces_sample_rate=1.0, integrations=[Boto3Integration()]) + events = capture_events() + + try: + s3 = session.resource("s3") + with sentry_sdk.start_transaction(), MockResponse( + s3.meta.client, 200, {}, read_fixture("s3_list.xml") + ): + bucket = s3.Bucket("bucket") + # read bucket (this makes http request) + [obj for obj in bucket.objects.all()] + 1 / 0 + except Exception as e: + sentry_sdk.capture_exception(e) + + (_, event) = events + crumb = event["breadcrumbs"]["values"][0] + assert crumb == { + "type": "http", + "category": "httplib", + "data": { + "http.method": "GET", + "aws.request.url": "https://bucket.s3.amazonaws.com/", + "http.query": "encoding-type=url", + "http.fragment": "", + }, + "timestamp": mock.ANY, + } + + def test_streaming(sentry_init, capture_events): sentry_init(traces_sample_rate=1.0, integrations=[Boto3Integration()]) events = capture_events() diff --git a/tests/integrations/redis/test_redis.py b/tests/integrations/redis/test_redis.py index f8225bed79..0fec23f273 100644 --- a/tests/integrations/redis/test_redis.py +++ b/tests/integrations/redis/test_redis.py @@ -186,7 +186,7 @@ def test_data_truncation(sentry_init, capture_events, render_span_tree): - op="": description=null - op="db.redis": description="SET 'somekey1' '{long_string[: 1024 - len("...") - len("SET 'somekey1' '")]}..." - op="db.redis": description="SET 'somekey2' 'bbbbbbbbbb'"\ -""" +""" # noqa: E221 ) @@ -212,7 +212,7 @@ def test_data_truncation_custom(sentry_init, capture_events, render_span_tree): - op="": description=null - op="db.redis": description="SET 'somekey1' '{long_string[: 30 - len("...") - len("SET 'somekey1' '")]}..." - op="db.redis": description="SET 'somekey2' '{short_string}'"\ -""" +""" # noqa: E221 ) From 8b875c156863e973cda139a53902cc455295ccc5 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Mon, 28 Oct 2024 14:28:30 +0100 Subject: [PATCH 076/244] Fix broken sampler after backpressure change (#3702) --- sentry_sdk/integrations/opentelemetry/sampler.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/sentry_sdk/integrations/opentelemetry/sampler.py b/sentry_sdk/integrations/opentelemetry/sampler.py index b5c500b3f3..e4c17a4aab 100644 --- a/sentry_sdk/integrations/opentelemetry/sampler.py +++ b/sentry_sdk/integrations/opentelemetry/sampler.py @@ -68,10 +68,11 @@ def dropped_result(parent_span_context, attributes, sample_rate=None): else: reason = "sample_rate" - client.transport.record_lost_event(reason, data_category="transaction") + if client.transport and has_tracing_enabled(client.options): + client.transport.record_lost_event(reason, data_category="transaction") - # Only one span (the transaction itself) is discarded, since we did not record any spans here. - client.transport.record_lost_event(reason, data_category="span") + # Only one span (the transaction itself) is discarded, since we did not record any spans here. 
+ client.transport.record_lost_event(reason, data_category="span") return SamplingResult( Decision.DROP, From 2c26f425a0c5070c8d183946f6722d9b1b0877a2 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Mon, 28 Oct 2024 15:56:17 +0100 Subject: [PATCH 077/244] Make profiling work with potel (#3704) --- .../opentelemetry/potel_span_processor.py | 52 ++++++++++++++++++- .../integrations/opentelemetry/sampler.py | 1 + .../integrations/opentelemetry/utils.py | 14 ++++- sentry_sdk/tracing.py | 13 ++--- 4 files changed, 69 insertions(+), 11 deletions(-) diff --git a/sentry_sdk/integrations/opentelemetry/potel_span_processor.py b/sentry_sdk/integrations/opentelemetry/potel_span_processor.py index 383234cba4..1d27642d1e 100644 --- a/sentry_sdk/integrations/opentelemetry/potel_span_processor.py +++ b/sentry_sdk/integrations/opentelemetry/potel_span_processor.py @@ -12,7 +12,14 @@ from opentelemetry.sdk.trace import Span, ReadableSpan, SpanProcessor from sentry_sdk import capture_event +from sentry_sdk.consts import SPANDATA from sentry_sdk.tracing import DEFAULT_SPAN_ORIGIN +from sentry_sdk.utils import get_current_thread_meta +from sentry_sdk.profiler.continuous_profiler import ( + try_autostart_continuous_profiler, + get_profiler_id, +) +from sentry_sdk.profiler.transaction_profiler import Profile from sentry_sdk.integrations.opentelemetry.utils import ( is_sentry_span, convert_from_otel_timestamp, @@ -20,6 +27,7 @@ extract_span_data, extract_transaction_name_source, get_trace_context, + get_profile_context, get_sentry_meta, set_sentry_meta, ) @@ -54,8 +62,11 @@ def __init__(self): def on_start(self, span, parent_context=None): # type: (Span, Optional[Context]) -> None - if not is_sentry_span(span): - self._add_root_span(span, get_current_span(parent_context)) + if is_sentry_span(span): + return + + self._add_root_span(span, get_current_span(parent_context)) + self._start_profile(span) def on_end(self, span): # type: (ReadableSpan) -> None @@ -94,6 +105,32 @@ def _add_root_span(self, span, parent_span): # root span points to itself set_sentry_meta(span, "root_span", span) + def _start_profile(self, span): + # type: (Span) -> None + try_autostart_continuous_profiler() + profiler_id = get_profiler_id() + thread_id, thread_name = get_current_thread_meta() + + if profiler_id: + span.set_attribute(SPANDATA.PROFILER_ID, profiler_id) + if thread_id: + span.set_attribute(SPANDATA.THREAD_ID, str(thread_id)) + if thread_name: + span.set_attribute(SPANDATA.THREAD_NAME, thread_name) + + is_root_span = not span.parent or span.parent.is_remote + sampled = span.context and span.context.trace_flags.sampled + + if is_root_span and sampled: + # profiler uses time.perf_counter_ns() so we cannot use the + # unix timestamp that is on span.start_time + # setting it to 0 means the profiler will internally measure time on start + profile = Profile(sampled, 0) + # TODO-neel-potel sampling context?? 
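# Editor's note (descriptive comment, not part of the original patch): the
# Profile created here is stored on the span via
# set_sentry_meta(span, "profile", profile) and closed again in
# _root_span_to_transaction_event below, where it is attached to the
# transaction event only if profile.valid().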
+ profile._set_initial_sampling_decision(sampling_context={}) + profile.__enter__() + set_sentry_meta(span, "profile", profile) + def _flush_root_span(self, span): # type: (ReadableSpan) -> None transaction_event = self._root_span_to_transaction_event(span) @@ -147,6 +184,10 @@ def _root_span_to_transaction_event(self, span): trace_context = get_trace_context(span, span_data=span_data) contexts = {"trace": trace_context} + profile_context = get_profile_context(span) + if profile_context: + contexts["profile"] = profile_context + if http_status: contexts["response"] = {"status_code": http_status} @@ -162,6 +203,13 @@ def _root_span_to_transaction_event(self, span): } ) + profile = cast("Optional[Profile]", get_sentry_meta(span, "profile")) + if profile: + profile.__exit__(None, None, None) + if profile.valid(): + event["profile"] = profile + set_sentry_meta(span, "profile", None) + return event def _span_to_json(self, span): diff --git a/sentry_sdk/integrations/opentelemetry/sampler.py b/sentry_sdk/integrations/opentelemetry/sampler.py index e4c17a4aab..ed8ca36ebd 100644 --- a/sentry_sdk/integrations/opentelemetry/sampler.py +++ b/sentry_sdk/integrations/opentelemetry/sampler.py @@ -128,6 +128,7 @@ def should_sample( has_traces_sampler = callable(client.options.get("traces_sampler")) if has_traces_sampler: # TODO-anton: Make proper sampling_context + # TODO-neel-potel: Make proper sampling_context sampling_context = { "transaction_context": { "name": name, diff --git a/sentry_sdk/integrations/opentelemetry/utils.py b/sentry_sdk/integrations/opentelemetry/utils.py index d274f4e887..51faf41c0c 100644 --- a/sentry_sdk/integrations/opentelemetry/utils.py +++ b/sentry_sdk/integrations/opentelemetry/utils.py @@ -17,7 +17,7 @@ import sentry_sdk from sentry_sdk.utils import Dsn -from sentry_sdk.consts import SPANSTATUS, OP +from sentry_sdk.consts import SPANSTATUS, OP, SPANDATA from sentry_sdk.tracing import get_span_status_from_http_code, DEFAULT_SPAN_ORIGIN from sentry_sdk.tracing_utils import Baggage, LOW_QUALITY_TRANSACTION_SOURCES from sentry_sdk.integrations.opentelemetry.consts import SentrySpanAttribute @@ -432,3 +432,15 @@ def set_sentry_meta(span, key, value): sentry_meta = getattr(span, "_sentry_meta", {}) sentry_meta[key] = value span._sentry_meta = sentry_meta + + +def get_profile_context(span): + # type: (ReadableSpan) -> Optional[dict[str, str]] + if not span.attributes: + return None + + profiler_id = cast("Optional[str]", span.attributes.get(SPANDATA.PROFILER_ID)) + if profiler_id is None: + return None + + return {"profiler_id": profiler_id} diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 72f5a79065..eeea307c9c 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -1549,10 +1549,10 @@ def set_thread(self, thread_id, thread_name): if thread_name is not None: self.set_data(SPANDATA.THREAD_NAME, thread_name) - def set_profiler_id(self, profiler_id): - # type: (Optional[str]) -> None - if profiler_id is not None: - self.set_data(SPANDATA.PROFILER_ID, profiler_id) + def update_active_thread(self): + # type: () -> None + thread_id, thread_name = get_current_thread_meta() + self.set_thread(thread_id, thread_name) def set_http_status(self, http_status): # type: (int) -> None @@ -1576,6 +1576,7 @@ def finish(self, end_timestamp=None): def to_json(self): # type: () -> dict[str, Any] + # TODO-neel-potel for sampling context pass def get_trace_context(self): @@ -1589,10 +1590,6 @@ def get_trace_context(self): return get_trace_context(self._otel_span) - def 
get_profile_context(self): - # type: () -> Optional[ProfileContext] - pass - def set_context(self, key, value): # type: (str, Any) -> None from sentry_sdk.integrations.opentelemetry.consts import SentrySpanAttribute From 49a6d1d49a2c2ca8da0bf0895d579f1186353160 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Mon, 28 Oct 2024 16:17:59 +0100 Subject: [PATCH 078/244] Remove name sanitization for POTelSpan and quote trace state populated values (#3705) --- sentry_sdk/integrations/opentelemetry/utils.py | 12 +++++++----- sentry_sdk/tracing.py | 6 +----- 2 files changed, 8 insertions(+), 10 deletions(-) diff --git a/sentry_sdk/integrations/opentelemetry/utils.py b/sentry_sdk/integrations/opentelemetry/utils.py index 51faf41c0c..544362a87c 100644 --- a/sentry_sdk/integrations/opentelemetry/utils.py +++ b/sentry_sdk/integrations/opentelemetry/utils.py @@ -386,22 +386,24 @@ def get_trace_state(span): options = client.options or {} trace_state = trace_state.update( - Baggage.SENTRY_PREFIX + "trace_id", format_trace_id(span_context.trace_id) + Baggage.SENTRY_PREFIX + "trace_id", + quote(format_trace_id(span_context.trace_id)), ) if options.get("environment"): trace_state = trace_state.update( - Baggage.SENTRY_PREFIX + "environment", options["environment"] + Baggage.SENTRY_PREFIX + "environment", quote(options["environment"]) ) if options.get("release"): trace_state = trace_state.update( - Baggage.SENTRY_PREFIX + "release", options["release"] + Baggage.SENTRY_PREFIX + "release", quote(options["release"]) ) if options.get("dsn"): trace_state = trace_state.update( - Baggage.SENTRY_PREFIX + "public_key", Dsn(options["dsn"]).public_key + Baggage.SENTRY_PREFIX + "public_key", + quote(Dsn(options["dsn"]).public_key), ) root_span = get_sentry_meta(span, "root_span") @@ -415,7 +417,7 @@ def get_trace_state(span): and transaction_source not in LOW_QUALITY_TRANSACTION_SOURCES ): trace_state = trace_state.update( - Baggage.SENTRY_PREFIX + "transaction", transaction_name + Baggage.SENTRY_PREFIX + "transaction", quote(transaction_name) ) return trace_state diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index eeea307c9c..fd76c50b50 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -1224,7 +1224,7 @@ def __init__( # OTel timestamps have nanosecond precision start_timestamp = convert_to_otel_timestamp(start_timestamp) - span_name = self._sanitize_name(name or description or op or "") + span_name = name or description or op or "" self._otel_span = tracer.start_span(span_name, start_time=start_timestamp) self.origin = origin or DEFAULT_SPAN_ORIGIN @@ -1598,10 +1598,6 @@ def set_context(self, key, value): self.set_attribute(f"{SentrySpanAttribute.CONTEXT}.{key}", value) - def _sanitize_name(self, name): - """No commas and equals allowed in tracestate.""" - return name.replace(",", "").replace("=", "") - if TYPE_CHECKING: From 8f87ada9beb06f275b905d0c634f788d91e24959 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Mon, 28 Oct 2024 16:18:54 +0100 Subject: [PATCH 079/244] Fix migration guide for commas in tx names --- MIGRATION_GUIDE.md | 1 - 1 file changed, 1 deletion(-) diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index ccf2876375..5658ee04fd 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -10,7 +10,6 @@ Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of wh ### Changed - The SDK now supports Python 3.7 and higher. -- Transaction names can no longer contain commas and equals signs. If present, these characters will be stripped. 
- `sentry_sdk.start_span` now only takes keyword arguments. - `sentry_sdk.start_span` no longer takes an explicit `span` argument. - The `Span()` constructor does not accept a `hub` parameter anymore. From fae6713f5acb43804197f55569bb256522d166e2 Mon Sep 17 00:00:00 2001 From: Daniel Szoke Date: Tue, 5 Nov 2024 09:43:58 +0100 Subject: [PATCH 080/244] ref(init): Deprecate `sentry_sdk.init` context manager It is possible to use the return value of `sentry_sdk.init` as a context manager; however, this functionality has not been maintained for a long time, and it does not seem to be documented anywhere. So, we are deprecating this functionality, and we will remove it in the next major release. Closes #3282 --- sentry_sdk/_init_implementation.py | 21 +++++++++++++++++++++ tests/test_api.py | 17 +++++++++++++++++ 2 files changed, 38 insertions(+) diff --git a/sentry_sdk/_init_implementation.py b/sentry_sdk/_init_implementation.py index 256a69ee83..eb02b3d11e 100644 --- a/sentry_sdk/_init_implementation.py +++ b/sentry_sdk/_init_implementation.py @@ -1,3 +1,5 @@ +import warnings + from typing import TYPE_CHECKING import sentry_sdk @@ -9,16 +11,35 @@ class _InitGuard: + _CONTEXT_MANAGER_DEPRECATION_WARNING_MESSAGE = ( + "Using the return value of sentry_sdk.init as a context manager " + "and manually calling the __enter__ and __exit__ methods on the " + "return value are deprecated. We are no longer maintaining this " + "functionality, and we will remove it in the next major release." + ) + def __init__(self, client): # type: (sentry_sdk.Client) -> None self._client = client def __enter__(self): # type: () -> _InitGuard + warnings.warn( + self._CONTEXT_MANAGER_DEPRECATION_WARNING_MESSAGE, + stacklevel=2, + category=DeprecationWarning, + ) + return self def __exit__(self, exc_type, exc_value, tb): # type: (Any, Any, Any) -> None + warnings.warn( + self._CONTEXT_MANAGER_DEPRECATION_WARNING_MESSAGE, + stacklevel=2, + category=DeprecationWarning, + ) + c = self._client if c is not None: c.close() diff --git a/tests/test_api.py b/tests/test_api.py index 46fc24fd24..0fdafa9f5d 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1,6 +1,7 @@ import pytest from unittest import mock +import sentry_sdk from sentry_sdk import ( capture_exception, continue_trace, @@ -181,3 +182,19 @@ def test_set_tags(sentry_init, capture_events): "tag2": "updated", "tag3": "new", }, "Updating tags with empty dict changed tags" + + +def test_init_context_manager_deprecation(): + with pytest.warns(DeprecationWarning): + with sentry_sdk.init(): + ... + + +def test_init_enter_deprecation(): + with pytest.warns(DeprecationWarning): + sentry_sdk.init().__enter__() + + +def test_init_exit_deprecation(): + with pytest.warns(DeprecationWarning): + sentry_sdk.init().__exit__(None, None, None) From e7218dacb34630f00c8d1c80692303752b7b7a67 Mon Sep 17 00:00:00 2001 From: Daniel Szoke Date: Tue, 5 Nov 2024 10:17:05 +0100 Subject: [PATCH 081/244] feat(init): Remove `sentry_sdk.init` context manager BREAKING CHANGE: `sentry_sdk.init` now returns `None` instead of a context manager. --- MIGRATION_GUIDE.md | 3 +- sentry_sdk/_init_implementation.py | 45 ++---------------------------- tests/test_api.py | 17 ----------- 3 files changed, 4 insertions(+), 61 deletions(-) diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index 5658ee04fd..495bfff75e 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -1,6 +1,5 @@ # Sentry SDK Migration Guide - ## Upgrading to 3.0 Looking to upgrade from Sentry SDK 2.x to 3.x? 
Here's a comprehensive list of what's changed. Looking for a more digestible summary? See the [guide in the docs](https://docs.sentry.io/platforms/python/migration/2.x-to-3.x) with the most common migration patterns. @@ -19,6 +18,7 @@ Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of wh - `sentry_sdk.continue_trace` no longer returns a `Transaction` and is now a context manager. - Redis integration: In Redis pipeline spans there is no `span["data"]["redis.commands"]` that contains a dict `{"count": 3, "first_ten": ["cmd1", "cmd2", ...]}` but instead `span["data"]["redis.commands.count"]` (containing `3`) and `span["data"]["redis.commands.first_ten"]` (containing `["cmd1", "cmd2", ...]`). - clickhouse-driver integration: The query is now available under the `db.query.text` span attribute (only if `send_default_pii` is `True`). +- `sentry_sdk.init` now returns `None` instead of a context manager. ### Removed @@ -40,7 +40,6 @@ Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of wh - `continue_from_headers`, `continue_from_environ` and `from_traceparent` have been removed, please use top-level API `sentry_sdk.continue_trace` instead. - `PropagationContext` constructor no longer takes a `dynamic_sampling_context` but takes a `baggage` object instead. - ### Deprecated - `sentry_sdk.start_transaction` is deprecated. Use `sentry_sdk.start_span` instead. diff --git a/sentry_sdk/_init_implementation.py b/sentry_sdk/_init_implementation.py index eb02b3d11e..dc235af243 100644 --- a/sentry_sdk/_init_implementation.py +++ b/sentry_sdk/_init_implementation.py @@ -1,50 +1,13 @@ -import warnings - from typing import TYPE_CHECKING import sentry_sdk if TYPE_CHECKING: - from typing import Any, ContextManager, Optional + from typing import Any, Optional import sentry_sdk.consts -class _InitGuard: - _CONTEXT_MANAGER_DEPRECATION_WARNING_MESSAGE = ( - "Using the return value of sentry_sdk.init as a context manager " - "and manually calling the __enter__ and __exit__ methods on the " - "return value are deprecated. We are no longer maintaining this " - "functionality, and we will remove it in the next major release." - ) - - def __init__(self, client): - # type: (sentry_sdk.Client) -> None - self._client = client - - def __enter__(self): - # type: () -> _InitGuard - warnings.warn( - self._CONTEXT_MANAGER_DEPRECATION_WARNING_MESSAGE, - stacklevel=2, - category=DeprecationWarning, - ) - - return self - - def __exit__(self, exc_type, exc_value, tb): - # type: (Any, Any, Any) -> None - warnings.warn( - self._CONTEXT_MANAGER_DEPRECATION_WARNING_MESSAGE, - stacklevel=2, - category=DeprecationWarning, - ) - - c = self._client - if c is not None: - c.close() - - def _check_python_deprecations(): # type: () -> None # Since we're likely to deprecate Python versions in the future, I'm keeping @@ -54,7 +17,7 @@ def _check_python_deprecations(): def _init(*args, **kwargs): - # type: (*Optional[str], **Any) -> ContextManager[Any] + # type: (*Optional[str], **Any) -> None """Initializes the SDK and optionally integrations. This takes the same arguments as the client constructor. 
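With the `_InitGuard` removed, `sentry_sdk.init` is called purely for its side effect of binding a client to the global scope. A minimal sketch of the new calling pattern (placeholder DSN; the explicit `flush` stands in for the `close()` call that `__exit__` used to make):

    import sentry_sdk

    # init() now returns None, so it is no longer usable as a context manager
    sentry_sdk.init(dsn="https://examplePublicKey@o0.ingest.sentry.io/0")

    try:
        1 / 0
    except ZeroDivisionError:
        sentry_sdk.capture_exception()

    # flush pending events explicitly instead of relying on __exit__
    sentry_sdk.flush()
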
@@ -62,8 +25,6 @@ def _init(*args, **kwargs): client = sentry_sdk.Client(*args, **kwargs) sentry_sdk.get_global_scope().set_client(client) _check_python_deprecations() - rv = _InitGuard(client) - return rv if TYPE_CHECKING: @@ -73,7 +34,7 @@ def _init(*args, **kwargs): # Use `ClientConstructor` to define the argument types of `init` and # `ContextManager[Any]` to tell static analyzers about the return type. - class init(sentry_sdk.consts.ClientConstructor, _InitGuard): # noqa: N801 + class init(sentry_sdk.consts.ClientConstructor): # noqa: N801 pass else: diff --git a/tests/test_api.py b/tests/test_api.py index 0fdafa9f5d..46fc24fd24 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1,7 +1,6 @@ import pytest from unittest import mock -import sentry_sdk from sentry_sdk import ( capture_exception, continue_trace, @@ -182,19 +181,3 @@ def test_set_tags(sentry_init, capture_events): "tag2": "updated", "tag3": "new", }, "Updating tags with empty dict changed tags" - - -def test_init_context_manager_deprecation(): - with pytest.warns(DeprecationWarning): - with sentry_sdk.init(): - ... - - -def test_init_enter_deprecation(): - with pytest.warns(DeprecationWarning): - sentry_sdk.init().__enter__() - - -def test_init_exit_deprecation(): - with pytest.warns(DeprecationWarning): - sentry_sdk.init().__exit__(None, None, None) From 0ffc5cdb9beafd975c93b182955a09a64c1b3e38 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Tue, 5 Nov 2024 13:53:28 +0100 Subject: [PATCH 082/244] Make Anthropic integration work with Potel (#3686) --- sentry_sdk/integrations/anthropic.py | 12 +++- .../integrations/anthropic/test_anthropic.py | 63 +++++++++++-------- 2 files changed, 47 insertions(+), 28 deletions(-) diff --git a/sentry_sdk/integrations/anthropic.py b/sentry_sdk/integrations/anthropic.py index 87e69a3113..a41005ed20 100644 --- a/sentry_sdk/integrations/anthropic.py +++ b/sentry_sdk/integrations/anthropic.py @@ -7,6 +7,7 @@ from sentry_sdk.integrations import DidNotEnable, Integration from sentry_sdk.scope import should_send_default_pii from sentry_sdk.utils import ( + _serialize_span_attribute, capture_internal_exceptions, event_from_exception, package_version, @@ -126,7 +127,7 @@ def _add_ai_data_to_span( complete_message = "".join(content_blocks) span.set_data( SPANDATA.AI_RESPONSES, - [{"type": "text", "text": complete_message}], + _serialize_span_attribute([{"type": "text", "text": complete_message}]), ) total_tokens = input_tokens + output_tokens record_token_usage(span, input_tokens, output_tokens, total_tokens) @@ -165,11 +166,16 @@ def _sentry_patched_create_common(f, *args, **kwargs): span.set_data(SPANDATA.AI_STREAMING, False) if should_send_default_pii() and integration.include_prompts: - span.set_data(SPANDATA.AI_INPUT_MESSAGES, messages) + span.set_data( + SPANDATA.AI_INPUT_MESSAGES, _serialize_span_attribute(messages) + ) if hasattr(result, "content"): if should_send_default_pii() and integration.include_prompts: - span.set_data(SPANDATA.AI_RESPONSES, _get_responses(result.content)) + span.set_data( + SPANDATA.AI_RESPONSES, + _serialize_span_attribute(_get_responses(result.content)), + ) _calculate_token_usage(result, span) span.__exit__(None, None, None) diff --git a/tests/integrations/anthropic/test_anthropic.py b/tests/integrations/anthropic/test_anthropic.py index 8ce12e70f5..4a7d7ed458 100644 --- a/tests/integrations/anthropic/test_anthropic.py +++ b/tests/integrations/anthropic/test_anthropic.py @@ -19,7 +19,7 @@ async def __call__(self, *args, **kwargs): from 
anthropic.types.message_delta_event import MessageDeltaEvent from anthropic.types.message_start_event import MessageStartEvent -from sentry_sdk.utils import package_version +from sentry_sdk.utils import _serialize_span_attribute, package_version try: from anthropic.types import InputJSONDelta @@ -115,10 +115,12 @@ def test_nonstreaming_create_message( assert span["data"][SPANDATA.AI_MODEL_ID] == "model" if send_default_pii and include_prompts: - assert span["data"][SPANDATA.AI_INPUT_MESSAGES] == messages - assert span["data"][SPANDATA.AI_RESPONSES] == [ - {"type": "text", "text": "Hi, I'm Claude."} - ] + assert span["data"][SPANDATA.AI_INPUT_MESSAGES] == _serialize_span_attribute( + messages + ) + assert span["data"][SPANDATA.AI_RESPONSES] == _serialize_span_attribute( + [{"type": "text", "text": "Hi, I'm Claude."}] + ) else: assert SPANDATA.AI_INPUT_MESSAGES not in span["data"] assert SPANDATA.AI_RESPONSES not in span["data"] @@ -183,10 +185,12 @@ async def test_nonstreaming_create_message_async( assert span["data"][SPANDATA.AI_MODEL_ID] == "model" if send_default_pii and include_prompts: - assert span["data"][SPANDATA.AI_INPUT_MESSAGES] == messages - assert span["data"][SPANDATA.AI_RESPONSES] == [ - {"type": "text", "text": "Hi, I'm Claude."} - ] + assert span["data"][SPANDATA.AI_INPUT_MESSAGES] == _serialize_span_attribute( + messages + ) + assert span["data"][SPANDATA.AI_RESPONSES] == _serialize_span_attribute( + [{"type": "text", "text": "Hi, I'm Claude."}] + ) else: assert SPANDATA.AI_INPUT_MESSAGES not in span["data"] assert SPANDATA.AI_RESPONSES not in span["data"] @@ -282,10 +286,12 @@ def test_streaming_create_message( assert span["data"][SPANDATA.AI_MODEL_ID] == "model" if send_default_pii and include_prompts: - assert span["data"][SPANDATA.AI_INPUT_MESSAGES] == messages - assert span["data"][SPANDATA.AI_RESPONSES] == [ - {"type": "text", "text": "Hi! I'm Claude!"} - ] + assert span["data"][SPANDATA.AI_INPUT_MESSAGES] == _serialize_span_attribute( + messages + ) + assert span["data"][SPANDATA.AI_RESPONSES] == _serialize_span_attribute( + [{"type": "text", "text": "Hi! I'm Claude!"}] + ) else: assert SPANDATA.AI_INPUT_MESSAGES not in span["data"] @@ -385,10 +391,12 @@ async def test_streaming_create_message_async( assert span["data"][SPANDATA.AI_MODEL_ID] == "model" if send_default_pii and include_prompts: - assert span["data"][SPANDATA.AI_INPUT_MESSAGES] == messages - assert span["data"][SPANDATA.AI_RESPONSES] == [ - {"type": "text", "text": "Hi! I'm Claude!"} - ] + assert span["data"][SPANDATA.AI_INPUT_MESSAGES] == _serialize_span_attribute( + messages + ) + assert span["data"][SPANDATA.AI_RESPONSES] == _serialize_span_attribute( + [{"type": "text", "text": "Hi! 
I'm Claude!"}] + ) else: assert SPANDATA.AI_INPUT_MESSAGES not in span["data"] @@ -515,10 +523,12 @@ def test_streaming_create_message_with_input_json_delta( assert span["data"][SPANDATA.AI_MODEL_ID] == "model" if send_default_pii and include_prompts: - assert span["data"][SPANDATA.AI_INPUT_MESSAGES] == messages - assert span["data"][SPANDATA.AI_RESPONSES] == [ - {"text": "", "type": "text"} - ] # we do not record InputJSONDelta because it could contain PII + assert span["data"][SPANDATA.AI_INPUT_MESSAGES] == _serialize_span_attribute( + messages + ) + assert span["data"][SPANDATA.AI_RESPONSES] == _serialize_span_attribute( + [{"type": "text", "text": ""}] + ) # we do not record InputJSONDelta because it could contain PII else: assert SPANDATA.AI_INPUT_MESSAGES not in span["data"] @@ -652,10 +662,12 @@ async def test_streaming_create_message_with_input_json_delta_async( assert span["data"][SPANDATA.AI_MODEL_ID] == "model" if send_default_pii and include_prompts: - assert span["data"][SPANDATA.AI_INPUT_MESSAGES] == messages - assert span["data"][SPANDATA.AI_RESPONSES] == [ - {"text": "", "type": "text"} - ] # we do not record InputJSONDelta because it could contain PII + assert span["data"][SPANDATA.AI_INPUT_MESSAGES] == _serialize_span_attribute( + messages + ) + assert span["data"][SPANDATA.AI_RESPONSES] == _serialize_span_attribute( + [{"type": "text", "text": ""}] + ) # we do not record InputJSONDelta because it could contain PII else: assert SPANDATA.AI_INPUT_MESSAGES not in span["data"] @@ -667,6 +679,7 @@ async def test_streaming_create_message_with_input_json_delta_async( assert span["data"]["ai.streaming"] is True +@pytest.mark.forked def test_exception_message_create(sentry_init, capture_events): sentry_init(integrations=[AnthropicIntegration()], traces_sample_rate=1.0) events = capture_events() From 453d15254d7334bd217d717da2f8d3cde4e94290 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Tue, 5 Nov 2024 14:16:49 +0100 Subject: [PATCH 083/244] Moved _serialize_span_attribute into set_attribute (#3732) --- sentry_sdk/integrations/anthropic.py | 12 +++--------- sentry_sdk/tracing.py | 3 ++- sentry_sdk/tracing_utils.py | 7 +++---- 3 files changed, 8 insertions(+), 14 deletions(-) diff --git a/sentry_sdk/integrations/anthropic.py b/sentry_sdk/integrations/anthropic.py index a41005ed20..87e69a3113 100644 --- a/sentry_sdk/integrations/anthropic.py +++ b/sentry_sdk/integrations/anthropic.py @@ -7,7 +7,6 @@ from sentry_sdk.integrations import DidNotEnable, Integration from sentry_sdk.scope import should_send_default_pii from sentry_sdk.utils import ( - _serialize_span_attribute, capture_internal_exceptions, event_from_exception, package_version, @@ -127,7 +126,7 @@ def _add_ai_data_to_span( complete_message = "".join(content_blocks) span.set_data( SPANDATA.AI_RESPONSES, - _serialize_span_attribute([{"type": "text", "text": complete_message}]), + [{"type": "text", "text": complete_message}], ) total_tokens = input_tokens + output_tokens record_token_usage(span, input_tokens, output_tokens, total_tokens) @@ -166,16 +165,11 @@ def _sentry_patched_create_common(f, *args, **kwargs): span.set_data(SPANDATA.AI_STREAMING, False) if should_send_default_pii() and integration.include_prompts: - span.set_data( - SPANDATA.AI_INPUT_MESSAGES, _serialize_span_attribute(messages) - ) + span.set_data(SPANDATA.AI_INPUT_MESSAGES, messages) if hasattr(result, "content"): if should_send_default_pii() and integration.include_prompts: - span.set_data( - SPANDATA.AI_RESPONSES, - 
_serialize_span_attribute(_get_responses(result.content)), - ) + span.set_data(SPANDATA.AI_RESPONSES, _get_responses(result.content)) _calculate_token_usage(result, span) span.__exit__(None, None, None) diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index fd76c50b50..31312c9ad3 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -18,6 +18,7 @@ from sentry_sdk.consts import SPANSTATUS, SPANDATA from sentry_sdk.profiler.continuous_profiler import get_profiler_id from sentry_sdk.utils import ( + _serialize_span_attribute, get_current_thread_meta, is_valid_sample_rate, logger, @@ -1519,7 +1520,7 @@ def get_attribute(self, name): def set_attribute(self, key, value): # type: (str, Any) -> None - self._otel_span.set_attribute(key, value) + self._otel_span.set_attribute(key, _serialize_span_attribute(value)) def set_status(self, status): # type: (str) -> None diff --git a/sentry_sdk/tracing_utils.py b/sentry_sdk/tracing_utils.py index b9d4fb9335..fc701e27b7 100644 --- a/sentry_sdk/tracing_utils.py +++ b/sentry_sdk/tracing_utils.py @@ -23,7 +23,6 @@ _is_external_source, _is_in_project_root, _module_in_list, - _serialize_span_attribute, ) from typing import TYPE_CHECKING @@ -134,13 +133,13 @@ def record_sql_queries( data = {} if params_list is not None: - data["db.params"] = _serialize_span_attribute(params_list) + data["db.params"] = params_list if paramstyle is not None: - data["db.paramstyle"] = _serialize_span_attribute(paramstyle) + data["db.paramstyle"] = paramstyle if executemany: data["db.executemany"] = True if record_cursor_repr and cursor is not None: - data["db.cursor"] = _serialize_span_attribute(cursor) + data["db.cursor"] = cursor with capture_internal_exceptions(): sentry_sdk.add_breadcrumb(message=query, category="query", data=data) From 64ae2ef54a631e2487d75dd78327f362ad4a590d Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Tue, 5 Nov 2024 14:19:15 +0100 Subject: [PATCH 084/244] Fix Cohere tests (#3731) --- tests/integrations/cohere/test_cohere.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/tests/integrations/cohere/test_cohere.py b/tests/integrations/cohere/test_cohere.py index c0dff2214e..20371029d5 100644 --- a/tests/integrations/cohere/test_cohere.py +++ b/tests/integrations/cohere/test_cohere.py @@ -56,8 +56,9 @@ def test_nonstreaming_chat( assert span["data"]["ai.model_id"] == "some-model" if send_default_pii and include_prompts: - assert "some context" in span["data"]["ai.input_messages"][0]["content"] - assert "hello" in span["data"]["ai.input_messages"][1]["content"] + input_messages = json.loads(span["data"]["ai.input_messages"]) + assert "some context" in input_messages[0]["content"] + assert "hello" in input_messages[1]["content"] assert "the model response" in span["data"]["ai.responses"] else: assert "ai.input_messages" not in span["data"] @@ -127,8 +128,9 @@ def test_streaming_chat(sentry_init, capture_events, send_default_pii, include_p assert span["data"]["ai.model_id"] == "some-model" if send_default_pii and include_prompts: - assert "some context" in span["data"]["ai.input_messages"][0]["content"] - assert "hello" in span["data"]["ai.input_messages"][1]["content"] + input_messages = json.loads(span["data"]["ai.input_messages"]) + assert "some context" in input_messages[0]["content"] + assert "hello" in input_messages[1]["content"] assert "the model response" in span["data"]["ai.responses"] else: assert "ai.input_messages" not in span["data"] @@ -150,7 +152,7 @@ def test_bad_chat(sentry_init, 
capture_events): with pytest.raises(httpx.HTTPError): client.chat(model="some-model", message="hello") - (event,) = events + (event, _) = events assert event["level"] == "error" From fb66e1a8c759f7bd4a7c31635fea3af4e137b5c8 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Tue, 5 Nov 2024 15:27:19 +0100 Subject: [PATCH 085/244] Fix WSGI tests (#3734) * Don't default to OK span status for UNSET, this breaks older behavior * Set transaction name manually on scope after isolation (we should generally do this everywhere) --- .../integrations/opentelemetry/utils.py | 6 +++-- sentry_sdk/integrations/wsgi.py | 10 ++++--- .../integrations/opentelemetry/test_utils.py | 26 +++++++++---------- 3 files changed, 23 insertions(+), 19 deletions(-) diff --git a/sentry_sdk/integrations/opentelemetry/utils.py b/sentry_sdk/integrations/opentelemetry/utils.py index 544362a87c..07e60ddd0f 100644 --- a/sentry_sdk/integrations/opentelemetry/utils.py +++ b/sentry_sdk/integrations/opentelemetry/utils.py @@ -234,7 +234,7 @@ def extract_span_status(span): return (inferred_status, http_status) if status and status.status_code == StatusCode.UNSET: - return (SPANSTATUS.OK, None) + return (None, None) else: return (SPANSTATUS.UNKNOWN_ERROR, None) @@ -308,9 +308,11 @@ def get_trace_context(span, span_data=None): "parent_span_id": parent_span_id, "op": op, "origin": origin or DEFAULT_SPAN_ORIGIN, - "status": status, } # type: dict[str, Any] + if status: + trace_context["status"] = status + if span.attributes: trace_context["data"] = dict(span.attributes) diff --git a/sentry_sdk/integrations/wsgi.py b/sentry_sdk/integrations/wsgi.py index 843cf56564..72ecfdb758 100644 --- a/sentry_sdk/integrations/wsgi.py +++ b/sentry_sdk/integrations/wsgi.py @@ -46,6 +46,8 @@ def __call__(self, status, response_headers, exc_info=None): # type: ignore _wsgi_middleware_applied = ContextVar("sentry_wsgi_middleware_applied") +DEFAULT_TRANSACTION_NAME = "generic WSGI request" + def wsgi_decoding_dance(s, charset="utf-8", errors="replace"): # type: (str, str, str) -> str @@ -96,6 +98,8 @@ def __call__(self, environ, start_response): _wsgi_middleware_applied.set(True) try: with sentry_sdk.isolation_scope() as scope: + scope.set_transaction_name(DEFAULT_TRANSACTION_NAME, source=TRANSACTION_SOURCE_ROUTE) + with track_session(scope, session_mode="request"): with capture_internal_exceptions(): scope.clear_breadcrumbs() @@ -105,15 +109,13 @@ def __call__(self, environ, start_response): environ, self.use_x_forwarded_for ) ) - method = environ.get("REQUEST_METHOD", "").upper() should_trace = method in self.http_methods_to_capture with sentry_sdk.continue_trace(environ): with ( - sentry_sdk.start_transaction( - environ, + sentry_sdk.start_span( op=OP.HTTP_SERVER, - name="generic WSGI request", + name=DEFAULT_TRANSACTION_NAME, source=TRANSACTION_SOURCE_ROUTE, origin=self.span_origin, custom_sampling_context={"wsgi_environ": environ}, diff --git a/tests/integrations/opentelemetry/test_utils.py b/tests/integrations/opentelemetry/test_utils.py index 66ffd7898a..b6f1072480 100644 --- a/tests/integrations/opentelemetry/test_utils.py +++ b/tests/integrations/opentelemetry/test_utils.py @@ -16,54 +16,54 @@ [ ( "OTel Span Blank", - Status(StatusCode.UNSET), # Unset defaults to OK + Status(StatusCode.UNSET), {}, { "op": "OTel Span Blank", "description": "OTel Span Blank", - "status": "ok", + "status": None, "http_status_code": None, "origin": None, }, ), ( "OTel Span RPC", - Status(StatusCode.UNSET), # Unset defaults to OK + Status(StatusCode.UNSET), { 
"rpc.service": "myservice.EchoService", }, { "op": "rpc", "description": "OTel Span RPC", - "status": "ok", + "status": None, "http_status_code": None, "origin": None, }, ), ( "OTel Span Messaging", - Status(StatusCode.UNSET), # Unset defaults to OK + Status(StatusCode.UNSET), { "messaging.system": "rabbitmq", }, { "op": "message", "description": "OTel Span Messaging", - "status": "ok", + "status": None, "http_status_code": None, "origin": None, }, ), ( "OTel Span FaaS", - Status(StatusCode.UNSET), # Unset defaults to OK + Status(StatusCode.UNSET), { "faas.trigger": "pubsub", }, { "op": "pubsub", "description": "OTel Span FaaS", - "status": "ok", + "status": None, "http_status_code": None, "origin": None, }, @@ -241,13 +241,13 @@ def test_span_data_for_db_query(): ), ( SpanKind.SERVER, - Status(StatusCode.UNSET), # Unset defaults to OK + Status(StatusCode.UNSET), { "http.method": "POST", "http.route": "/some/route", }, { - "status": "ok", + "status": None, "http_status_code": None, }, ), @@ -336,15 +336,15 @@ def test_span_data_for_db_query(): SpanKind.SERVER, Status( StatusCode.ERROR, "I'm a teapot" - ), # Error status with unknown description is an unknown error + ), { "http.method": "POST", "http.route": "/some/route", "http.response.status_code": 418, }, { - "status": "unknown_error", - "http_status_code": None, + "status": "invalid_argument", + "http_status_code": 418, }, ), ( From e850655c01317f4be558c918dcad8f010ef17b24 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Tue, 5 Nov 2024 15:28:49 +0100 Subject: [PATCH 086/244] Fix ASGI tests (#3737) --- sentry_sdk/integrations/asgi.py | 40 ++++++++++++++++++++------------- sentry_sdk/integrations/wsgi.py | 4 +++- sentry_sdk/tracing.py | 8 ++++--- 3 files changed, 33 insertions(+), 19 deletions(-) diff --git a/sentry_sdk/integrations/asgi.py b/sentry_sdk/integrations/asgi.py index 61bc20fd40..f67c47ef02 100644 --- a/sentry_sdk/integrations/asgi.py +++ b/sentry_sdk/integrations/asgi.py @@ -176,6 +176,18 @@ async def _run_app(self, scope, receive, send, asgi_version): _asgi_middleware_applied.set(True) try: with sentry_sdk.isolation_scope() as sentry_scope: + ( + transaction_name, + transaction_source, + ) = self._get_transaction_name_and_source( + self.transaction_style, + scope, + ) + sentry_scope.set_transaction_name( + transaction_name, + source=transaction_source, + ) + with track_session(sentry_scope, session_mode="request"): sentry_scope.clear_breadcrumbs() sentry_scope._name = "asgi" @@ -183,19 +195,12 @@ async def _run_app(self, scope, receive, send, asgi_version): sentry_scope.add_event_processor(processor) ty = scope["type"] - ( - transaction_name, - transaction_source, - ) = self._get_transaction_name_and_source( - self.transaction_style, - scope, - ) method = scope.get("method", "").upper() should_trace = method in self.http_methods_to_capture with sentry_sdk.continue_trace(_get_headers(scope)): with ( - sentry_sdk.start_transaction( + sentry_sdk.start_span( op=( OP.WEBSOCKET_SERVER if ty == "websocket" @@ -251,13 +256,18 @@ def event_processor(self, event, hint, asgi_scope): event["request"] = deepcopy(request_data) # Only set transaction name if not already set by Starlette or FastAPI (or other frameworks) - already_set = event["transaction"] != _DEFAULT_TRANSACTION_NAME and event[ - "transaction_info" - ].get("source") in [ - TRANSACTION_SOURCE_COMPONENT, - TRANSACTION_SOURCE_ROUTE, - TRANSACTION_SOURCE_CUSTOM, - ] + already_set = ( + "transaction" in event + and event["transaction"] != _DEFAULT_TRANSACTION_NAME + and 
"transaction_info" in event + and "source" in event["transaction_info"] + and event["transaction_info"]["source"] + in [ + TRANSACTION_SOURCE_COMPONENT, + TRANSACTION_SOURCE_ROUTE, + TRANSACTION_SOURCE_CUSTOM, + ] + ) if not already_set: name, source = self._get_transaction_name_and_source( self.transaction_style, asgi_scope diff --git a/sentry_sdk/integrations/wsgi.py b/sentry_sdk/integrations/wsgi.py index 72ecfdb758..3aebff17d5 100644 --- a/sentry_sdk/integrations/wsgi.py +++ b/sentry_sdk/integrations/wsgi.py @@ -98,7 +98,9 @@ def __call__(self, environ, start_response): _wsgi_middleware_applied.set(True) try: with sentry_sdk.isolation_scope() as scope: - scope.set_transaction_name(DEFAULT_TRANSACTION_NAME, source=TRANSACTION_SOURCE_ROUTE) + scope.set_transaction_name( + DEFAULT_TRANSACTION_NAME, source=TRANSACTION_SOURCE_ROUTE + ) with track_session(scope, session_mode="request"): with capture_internal_exceptions(): diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 31312c9ad3..c2bd5734c5 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -683,9 +683,9 @@ def get_trace_context(self): rv["status"] = self.status if self.containing_transaction: - rv["dynamic_sampling_context"] = ( - self.containing_transaction.get_baggage().dynamic_sampling_context() - ) + rv[ + "dynamic_sampling_context" + ] = self.containing_transaction.get_baggage().dynamic_sampling_context() data = {} @@ -1203,6 +1203,7 @@ def __init__( start_timestamp=None, # type: Optional[Union[datetime, float]] origin=None, # type: Optional[str] name=None, # type: Optional[str] + source=TRANSACTION_SOURCE_CUSTOM, # type: str otel_span=None, # type: Optional[OtelSpan] **_, # type: dict[str, object] ): @@ -1232,6 +1233,7 @@ def __init__( self.op = op self.description = description self.name = span_name + self.source = source if status is not None: self.set_status(status) From 632dc685cbaab1e547c8d192da0373c1cf3bb4eb Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Mon, 11 Nov 2024 10:39:34 +0100 Subject: [PATCH 087/244] Add `only_if_parent` option to POTelSpan and use it in integrations (#3748) If this option is on, we will only create a new underlying otel span if there's an active valid parent, otherwise we will just return an invalid `NonRecordingSpan` (`INVALID_SPAN`). All internal integration child `start_span` calls have been modified so that now we will only create spans if there is an active root span (transaction) active. 
--- sentry_sdk/ai/monitoring.py | 8 ++- sentry_sdk/integrations/aiohttp.py | 1 + sentry_sdk/integrations/anthropic.py | 1 + sentry_sdk/integrations/arq.py | 5 +- sentry_sdk/integrations/asyncio.py | 1 + sentry_sdk/integrations/asyncpg.py | 1 + sentry_sdk/integrations/boto3.py | 1 + sentry_sdk/integrations/celery/__init__.py | 1 + sentry_sdk/integrations/clickhouse_driver.py | 1 + sentry_sdk/integrations/cohere.py | 2 + sentry_sdk/integrations/django/__init__.py | 1 + sentry_sdk/integrations/django/asgi.py | 1 + sentry_sdk/integrations/django/caching.py | 1 + sentry_sdk/integrations/django/middleware.py | 1 + .../integrations/django/signals_handlers.py | 1 + sentry_sdk/integrations/django/templates.py | 2 + sentry_sdk/integrations/django/views.py | 2 + sentry_sdk/integrations/graphene.py | 4 +- sentry_sdk/integrations/grpc/aio/client.py | 2 + sentry_sdk/integrations/grpc/client.py | 2 + sentry_sdk/integrations/httpx.py | 2 + sentry_sdk/integrations/huey.py | 1 + sentry_sdk/integrations/huggingface_hub.py | 1 + sentry_sdk/integrations/langchain.py | 4 +- sentry_sdk/integrations/litestar.py | 3 ++ sentry_sdk/integrations/openai.py | 2 + sentry_sdk/integrations/pymongo.py | 1 + sentry_sdk/integrations/ray.py | 1 + .../integrations/redis/_async_common.py | 3 ++ sentry_sdk/integrations/redis/_sync_common.py | 1 + sentry_sdk/integrations/socket.py | 2 + sentry_sdk/integrations/starlette.py | 3 ++ sentry_sdk/integrations/starlite.py | 3 ++ sentry_sdk/integrations/stdlib.py | 4 ++ sentry_sdk/integrations/strawberry.py | 1 + sentry_sdk/tracing.py | 50 ++++++++++++------- 36 files changed, 98 insertions(+), 23 deletions(-) diff --git a/sentry_sdk/ai/monitoring.py b/sentry_sdk/ai/monitoring.py index 860833b8f5..e149ebe7df 100644 --- a/sentry_sdk/ai/monitoring.py +++ b/sentry_sdk/ai/monitoring.py @@ -33,7 +33,9 @@ def sync_wrapped(*args, **kwargs): curr_pipeline = _ai_pipeline_name.get() op = span_kwargs.get("op", "ai.run" if curr_pipeline else "ai.pipeline") - with start_span(name=description, op=op, **span_kwargs) as span: + with start_span( + name=description, op=op, only_if_parent=True, **span_kwargs + ) as span: for k, v in kwargs.pop("sentry_tags", {}).items(): span.set_tag(k, v) for k, v in kwargs.pop("sentry_data", {}).items(): @@ -62,7 +64,9 @@ async def async_wrapped(*args, **kwargs): curr_pipeline = _ai_pipeline_name.get() op = span_kwargs.get("op", "ai.run" if curr_pipeline else "ai.pipeline") - with start_span(name=description, op=op, **span_kwargs) as span: + with start_span( + name=description, op=op, only_if_parent=True, **span_kwargs + ) as span: for k, v in kwargs.pop("sentry_tags", {}).items(): span.set_tag(k, v) for k, v in kwargs.pop("sentry_data", {}).items(): diff --git a/sentry_sdk/integrations/aiohttp.py b/sentry_sdk/integrations/aiohttp.py index 36c1d807d2..0928c14c8b 100644 --- a/sentry_sdk/integrations/aiohttp.py +++ b/sentry_sdk/integrations/aiohttp.py @@ -228,6 +228,7 @@ async def on_request_start(session, trace_config_ctx, params): name="%s %s" % (method, parsed_url.url if parsed_url else SENSITIVE_DATA_SUBSTITUTE), origin=AioHttpIntegration.origin, + only_if_parent=True, ) data = { diff --git a/sentry_sdk/integrations/anthropic.py b/sentry_sdk/integrations/anthropic.py index 87e69a3113..d128ad7d25 100644 --- a/sentry_sdk/integrations/anthropic.py +++ b/sentry_sdk/integrations/anthropic.py @@ -151,6 +151,7 @@ def _sentry_patched_create_common(f, *args, **kwargs): op=OP.ANTHROPIC_MESSAGES_CREATE, description="Anthropic messages create", origin=AnthropicIntegration.origin, 
+ only_if_parent=True, ) span.__enter__() diff --git a/sentry_sdk/integrations/arq.py b/sentry_sdk/integrations/arq.py index 4640204725..345e030fb1 100644 --- a/sentry_sdk/integrations/arq.py +++ b/sentry_sdk/integrations/arq.py @@ -79,7 +79,10 @@ async def _sentry_enqueue_job(self, function, *args, **kwargs): return await old_enqueue_job(self, function, *args, **kwargs) with sentry_sdk.start_span( - op=OP.QUEUE_SUBMIT_ARQ, name=function, origin=ArqIntegration.origin + op=OP.QUEUE_SUBMIT_ARQ, + name=function, + origin=ArqIntegration.origin, + only_if_parent=True, ): return await old_enqueue_job(self, function, *args, **kwargs) diff --git a/sentry_sdk/integrations/asyncio.py b/sentry_sdk/integrations/asyncio.py index 7021d7fceb..d9bdf4a592 100644 --- a/sentry_sdk/integrations/asyncio.py +++ b/sentry_sdk/integrations/asyncio.py @@ -48,6 +48,7 @@ async def _coro_creating_hub_and_span(): op=OP.FUNCTION, name=get_name(coro), origin=AsyncioIntegration.origin, + only_if_parent=True, ): try: result = await coro diff --git a/sentry_sdk/integrations/asyncpg.py b/sentry_sdk/integrations/asyncpg.py index 1113fd199c..7d1654398a 100644 --- a/sentry_sdk/integrations/asyncpg.py +++ b/sentry_sdk/integrations/asyncpg.py @@ -169,6 +169,7 @@ async def _inner(*args: Any, **kwargs: Any) -> T: op=OP.DB, name="connect", origin=AsyncPGIntegration.origin, + only_if_parent=True, ) as span: data = _get_db_data( addr=kwargs.get("addr"), diff --git a/sentry_sdk/integrations/boto3.py b/sentry_sdk/integrations/boto3.py index 195b532e54..dfea7459c3 100644 --- a/sentry_sdk/integrations/boto3.py +++ b/sentry_sdk/integrations/boto3.py @@ -72,6 +72,7 @@ def _sentry_request_created(service_id, request, operation_name, **kwargs): op=OP.HTTP_CLIENT, name=description, origin=Boto3Integration.origin, + only_if_parent=True, ) data = { diff --git a/sentry_sdk/integrations/celery/__init__.py b/sentry_sdk/integrations/celery/__init__.py index 104a6755a7..e2ee4de532 100644 --- a/sentry_sdk/integrations/celery/__init__.py +++ b/sentry_sdk/integrations/celery/__init__.py @@ -495,6 +495,7 @@ def sentry_publish(self, *args, **kwargs): op=OP.QUEUE_PUBLISH, name=task_name, origin=CeleryIntegration.origin, + only_if_parent=True, ) as span: if task_id is not None: span.set_data(SPANDATA.MESSAGING_MESSAGE_ID, task_id) diff --git a/sentry_sdk/integrations/clickhouse_driver.py b/sentry_sdk/integrations/clickhouse_driver.py index 245ea0ef71..014db14b68 100644 --- a/sentry_sdk/integrations/clickhouse_driver.py +++ b/sentry_sdk/integrations/clickhouse_driver.py @@ -89,6 +89,7 @@ def _inner(*args: P.args, **kwargs: P.kwargs) -> T: op=OP.DB, name=query, origin=ClickhouseDriverIntegration.origin, + only_if_parent=True, ) connection._sentry_span = span # type: ignore[attr-defined] diff --git a/sentry_sdk/integrations/cohere.py b/sentry_sdk/integrations/cohere.py index b4c2af91da..a80ccb19b3 100644 --- a/sentry_sdk/integrations/cohere.py +++ b/sentry_sdk/integrations/cohere.py @@ -147,6 +147,7 @@ def new_chat(*args, **kwargs): op=consts.OP.COHERE_CHAT_COMPLETIONS_CREATE, name="cohere.client.Chat", origin=CohereIntegration.origin, + only_if_parent=True, ) span.__enter__() try: @@ -233,6 +234,7 @@ def new_embed(*args, **kwargs): op=consts.OP.COHERE_EMBEDDINGS_CREATE, name="Cohere Embedding Creation", origin=CohereIntegration.origin, + only_if_parent=True, ) as span: if "texts" in kwargs and ( should_send_default_pii() and integration.include_prompts diff --git a/sentry_sdk/integrations/django/__init__.py b/sentry_sdk/integrations/django/__init__.py index 
ba2cc6a0ad..2fea384ac9 100644 --- a/sentry_sdk/integrations/django/__init__.py +++ b/sentry_sdk/integrations/django/__init__.py @@ -687,6 +687,7 @@ def connect(self): op=OP.DB, name="connect", origin=DjangoIntegration.origin_db, + only_if_parent=True, ) as span: _set_db_data(span, self) return real_connect(self) diff --git a/sentry_sdk/integrations/django/asgi.py b/sentry_sdk/integrations/django/asgi.py index 73a25acc9f..daa1498c58 100644 --- a/sentry_sdk/integrations/django/asgi.py +++ b/sentry_sdk/integrations/django/asgi.py @@ -184,6 +184,7 @@ async def sentry_wrapped_callback(request, *args, **kwargs): op=OP.VIEW_RENDER, name=request.resolver_match.view_name, origin=DjangoIntegration.origin, + only_if_parent=True, ): return await callback(request, *args, **kwargs) diff --git a/sentry_sdk/integrations/django/caching.py b/sentry_sdk/integrations/django/caching.py index 4bd7cb7236..aa60993155 100644 --- a/sentry_sdk/integrations/django/caching.py +++ b/sentry_sdk/integrations/django/caching.py @@ -54,6 +54,7 @@ def _instrument_call( op=op, name=description, origin=DjangoIntegration.origin, + only_if_parent=True, ) as span: value = original_method(*args, **kwargs) diff --git a/sentry_sdk/integrations/django/middleware.py b/sentry_sdk/integrations/django/middleware.py index 245276566e..6640ac2919 100644 --- a/sentry_sdk/integrations/django/middleware.py +++ b/sentry_sdk/integrations/django/middleware.py @@ -89,6 +89,7 @@ def _check_middleware_span(old_method): op=OP.MIDDLEWARE_DJANGO, name=description, origin=DjangoIntegration.origin, + only_if_parent=True, ) middleware_span.set_tag("django.function_name", function_name) middleware_span.set_tag("django.middleware_name", middleware_name) diff --git a/sentry_sdk/integrations/django/signals_handlers.py b/sentry_sdk/integrations/django/signals_handlers.py index cb0f8b9d2e..ae948cec2a 100644 --- a/sentry_sdk/integrations/django/signals_handlers.py +++ b/sentry_sdk/integrations/django/signals_handlers.py @@ -68,6 +68,7 @@ def wrapper(*args, **kwargs): op=OP.EVENT_DJANGO, name=signal_name, origin=DjangoIntegration.origin, + only_if_parent=True, ) as span: span.set_data("signal", signal_name) return receiver(*args, **kwargs) diff --git a/sentry_sdk/integrations/django/templates.py b/sentry_sdk/integrations/django/templates.py index f5309c9cf3..c9e41e24a0 100644 --- a/sentry_sdk/integrations/django/templates.py +++ b/sentry_sdk/integrations/django/templates.py @@ -72,6 +72,7 @@ def rendered_content(self): op=OP.TEMPLATE_RENDER, name=_get_template_name_description(self.template_name), origin=DjangoIntegration.origin, + only_if_parent=True, ) as span: if isinstance(self.context_data, dict): for k, v in self.context_data.items(): @@ -102,6 +103,7 @@ def render(request, template_name, context=None, *args, **kwargs): op=OP.TEMPLATE_RENDER, name=_get_template_name_description(template_name), origin=DjangoIntegration.origin, + only_if_parent=True, ) as span: for k, v in context.items(): span.set_data(f"context.{k}", v) diff --git a/sentry_sdk/integrations/django/views.py b/sentry_sdk/integrations/django/views.py index 0a9861a6a6..e8dfa8abb6 100644 --- a/sentry_sdk/integrations/django/views.py +++ b/sentry_sdk/integrations/django/views.py @@ -37,6 +37,7 @@ def sentry_patched_render(self): op=OP.VIEW_RESPONSE_RENDER, name="serialize response", origin=DjangoIntegration.origin, + only_if_parent=True, ): return old_render(self) @@ -90,6 +91,7 @@ def sentry_wrapped_callback(request, *args, **kwargs): op=OP.VIEW_RENDER, name=request.resolver_match.view_name, 
origin=DjangoIntegration.origin, + only_if_parent=True, ): return callback(request, *args, **kwargs) diff --git a/sentry_sdk/integrations/graphene.py b/sentry_sdk/integrations/graphene.py index 03731dcaaa..828bb0ade5 100644 --- a/sentry_sdk/integrations/graphene.py +++ b/sentry_sdk/integrations/graphene.py @@ -144,7 +144,9 @@ def graphql_span(schema, source, kwargs): if scope.span: _graphql_span = scope.span.start_child(op=op, name=operation_name) else: - _graphql_span = sentry_sdk.start_span(op=op, name=operation_name) + _graphql_span = sentry_sdk.start_span( + op=op, name=operation_name, only_if_parent=True + ) _graphql_span.set_data("graphql.document", source) _graphql_span.set_data("graphql.operation.name", operation_name) diff --git a/sentry_sdk/integrations/grpc/aio/client.py b/sentry_sdk/integrations/grpc/aio/client.py index e8adeba05e..d1f4b352d3 100644 --- a/sentry_sdk/integrations/grpc/aio/client.py +++ b/sentry_sdk/integrations/grpc/aio/client.py @@ -52,6 +52,7 @@ async def intercept_unary_unary( op=OP.GRPC_CLIENT, name="unary unary call to %s" % method.decode(), origin=SPAN_ORIGIN, + only_if_parent=True, ) as span: span.set_data("type", "unary unary") span.set_data("method", method) @@ -82,6 +83,7 @@ async def intercept_unary_stream( op=OP.GRPC_CLIENT, name="unary stream call to %s" % method.decode(), origin=SPAN_ORIGIN, + only_if_parent=True, ) as span: span.set_data("type", "unary stream") span.set_data("method", method) diff --git a/sentry_sdk/integrations/grpc/client.py b/sentry_sdk/integrations/grpc/client.py index a5b4f9f52e..cb456fc9b4 100644 --- a/sentry_sdk/integrations/grpc/client.py +++ b/sentry_sdk/integrations/grpc/client.py @@ -31,6 +31,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): op=OP.GRPC_CLIENT, name="unary unary call to %s" % method, origin=SPAN_ORIGIN, + only_if_parent=True, ) as span: span.set_data("type", "unary unary") span.set_data("method", method) @@ -52,6 +53,7 @@ def intercept_unary_stream(self, continuation, client_call_details, request): op=OP.GRPC_CLIENT, name="unary stream call to %s" % method, origin=SPAN_ORIGIN, + only_if_parent=True, ) as span: span.set_data("type", "unary stream") span.set_data("method", method) diff --git a/sentry_sdk/integrations/httpx.py b/sentry_sdk/integrations/httpx.py index c4d8e4b4dc..338106c01b 100644 --- a/sentry_sdk/integrations/httpx.py +++ b/sentry_sdk/integrations/httpx.py @@ -59,6 +59,7 @@ def send(self, request, **kwargs): parsed_url.url if parsed_url else SENSITIVE_DATA_SUBSTITUTE, ), origin=HttpxIntegration.origin, + only_if_parent=True, ) as span: data = { SPANDATA.HTTP_METHOD: request.method, @@ -129,6 +130,7 @@ async def send(self, request, **kwargs): parsed_url.url if parsed_url else SENSITIVE_DATA_SUBSTITUTE, ), origin=HttpxIntegration.origin, + only_if_parent=True, ) as span: data = { SPANDATA.HTTP_METHOD: request.method, diff --git a/sentry_sdk/integrations/huey.py b/sentry_sdk/integrations/huey.py index dae3cf6dfa..9a3bfb9b99 100644 --- a/sentry_sdk/integrations/huey.py +++ b/sentry_sdk/integrations/huey.py @@ -61,6 +61,7 @@ def _sentry_enqueue(self, task): op=OP.QUEUE_SUBMIT_HUEY, name=task.name, origin=HueyIntegration.origin, + only_if_parent=True, ): if not isinstance(task, PeriodicTask): # Attach trace propagation data to task kwargs. 
We do diff --git a/sentry_sdk/integrations/huggingface_hub.py b/sentry_sdk/integrations/huggingface_hub.py index d09f6e2163..ae00618995 100644 --- a/sentry_sdk/integrations/huggingface_hub.py +++ b/sentry_sdk/integrations/huggingface_hub.py @@ -77,6 +77,7 @@ def new_text_generation(*args, **kwargs): op=consts.OP.HUGGINGFACE_HUB_CHAT_COMPLETIONS_CREATE, name="Text Generation", origin=HuggingfaceHubIntegration.origin, + only_if_parent=True, ) span.__enter__() try: diff --git a/sentry_sdk/integrations/langchain.py b/sentry_sdk/integrations/langchain.py index 431fc46bec..afce913d8e 100644 --- a/sentry_sdk/integrations/langchain.py +++ b/sentry_sdk/integrations/langchain.py @@ -143,7 +143,9 @@ def _create_span(self, run_id, parent_id, **kwargs): watched_span = WatchedSpan(parent_span.span.start_child(**kwargs)) parent_span.children.append(watched_span) if watched_span is None: - watched_span = WatchedSpan(sentry_sdk.start_span(**kwargs)) + watched_span = WatchedSpan( + sentry_sdk.start_span(only_if_parent=True, **kwargs) + ) if kwargs.get("op", "").startswith("ai.pipeline."): if kwargs.get("name"): diff --git a/sentry_sdk/integrations/litestar.py b/sentry_sdk/integrations/litestar.py index 4b04dada8a..25e49696db 100644 --- a/sentry_sdk/integrations/litestar.py +++ b/sentry_sdk/integrations/litestar.py @@ -141,6 +141,7 @@ async def _create_span_call(self, scope, receive, send): op=OP.MIDDLEWARE_LITESTAR, name=middleware_name, origin=LitestarIntegration.origin, + only_if_parent=True, ) as middleware_span: middleware_span.set_tag("litestar.middleware_name", middleware_name) @@ -153,6 +154,7 @@ async def _sentry_receive(*args, **kwargs): op=OP.MIDDLEWARE_LITESTAR_RECEIVE, name=getattr(receive, "__qualname__", str(receive)), origin=LitestarIntegration.origin, + only_if_parent=True, ) as span: span.set_tag("litestar.middleware_name", middleware_name) return await receive(*args, **kwargs) @@ -170,6 +172,7 @@ async def _sentry_send(message): op=OP.MIDDLEWARE_LITESTAR_SEND, name=getattr(send, "__qualname__", str(send)), origin=LitestarIntegration.origin, + only_if_parent=True, ) as span: span.set_tag("litestar.middleware_name", middleware_name) return await send(message) diff --git a/sentry_sdk/integrations/openai.py b/sentry_sdk/integrations/openai.py index e6ac36f3cb..86d308fdea 100644 --- a/sentry_sdk/integrations/openai.py +++ b/sentry_sdk/integrations/openai.py @@ -139,6 +139,7 @@ def _new_chat_completion_common(f, *args, **kwargs): op=consts.OP.OPENAI_CHAT_COMPLETIONS_CREATE, description="Chat Completion", origin=OpenAIIntegration.origin, + only_if_parent=True, ) span.__enter__() @@ -324,6 +325,7 @@ def _new_embeddings_create_common(f, *args, **kwargs): op=consts.OP.OPENAI_EMBEDDINGS_CREATE, description="OpenAI Embedding Creation", origin=OpenAIIntegration.origin, + only_if_parent=True, ) as span: if "input" in kwargs and ( should_send_default_pii() and integration.include_prompts diff --git a/sentry_sdk/integrations/pymongo.py b/sentry_sdk/integrations/pymongo.py index f03f70606b..32cb294075 100644 --- a/sentry_sdk/integrations/pymongo.py +++ b/sentry_sdk/integrations/pymongo.py @@ -153,6 +153,7 @@ def started(self, event): op=OP.DB, name=query, origin=PyMongoIntegration.origin, + only_if_parent=True, ) with capture_internal_exceptions(): diff --git a/sentry_sdk/integrations/ray.py b/sentry_sdk/integrations/ray.py index d1910026ab..0290bdf1ef 100644 --- a/sentry_sdk/integrations/ray.py +++ b/sentry_sdk/integrations/ray.py @@ -88,6 +88,7 @@ def _remote_method_with_header_propagation(*args, 
**kwargs): op=OP.QUEUE_SUBMIT_RAY, name=qualname_from_function(f), origin=RayIntegration.origin, + only_if_parent=True, ) as span: tracing = { k: v diff --git a/sentry_sdk/integrations/redis/_async_common.py b/sentry_sdk/integrations/redis/_async_common.py index f835affbf3..e62aa1a807 100644 --- a/sentry_sdk/integrations/redis/_async_common.py +++ b/sentry_sdk/integrations/redis/_async_common.py @@ -40,6 +40,7 @@ async def _sentry_execute(self, *args, **kwargs): op=OP.DB_REDIS, name="redis.pipeline.execute", origin=SPAN_ORIGIN, + only_if_parent=True, ) as span: with capture_internal_exceptions(): span_data = get_db_data_fn(self) @@ -84,6 +85,7 @@ async def _sentry_execute_command(self, name, *args, **kwargs): op=cache_properties["op"], name=cache_properties["description"], origin=SPAN_ORIGIN, + only_if_parent=True, ) cache_span.__enter__() @@ -93,6 +95,7 @@ async def _sentry_execute_command(self, name, *args, **kwargs): op=db_properties["op"], name=db_properties["description"], origin=SPAN_ORIGIN, + only_if_parent=True, ) db_span.__enter__() diff --git a/sentry_sdk/integrations/redis/_sync_common.py b/sentry_sdk/integrations/redis/_sync_common.py index 9c96ad61d1..63738ea7cb 100644 --- a/sentry_sdk/integrations/redis/_sync_common.py +++ b/sentry_sdk/integrations/redis/_sync_common.py @@ -41,6 +41,7 @@ def sentry_patched_execute(self, *args, **kwargs): op=OP.DB_REDIS, name="redis.pipeline.execute", origin=SPAN_ORIGIN, + only_if_parent=True, ) as span: with capture_internal_exceptions(): span_data = get_db_data_fn(self) diff --git a/sentry_sdk/integrations/socket.py b/sentry_sdk/integrations/socket.py index 0866ceb608..cba448c3a3 100644 --- a/sentry_sdk/integrations/socket.py +++ b/sentry_sdk/integrations/socket.py @@ -57,6 +57,7 @@ def create_connection( op=OP.SOCKET_CONNECTION, name=_get_span_description(address[0], address[1]), origin=SocketIntegration.origin, + only_if_parent=True, ) as span: span.set_data("address", address) span.set_data("timeout", timeout) @@ -83,6 +84,7 @@ def getaddrinfo(host, port, family=0, type=0, proto=0, flags=0): op=OP.SOCKET_DNS, name=_get_span_description(host, port), origin=SocketIntegration.origin, + only_if_parent=True, ) as span: span.set_data("host", host) span.set_data("port", port) diff --git a/sentry_sdk/integrations/starlette.py b/sentry_sdk/integrations/starlette.py index d9db8bd6b8..c8a415a64d 100644 --- a/sentry_sdk/integrations/starlette.py +++ b/sentry_sdk/integrations/starlette.py @@ -164,6 +164,7 @@ async def _create_span_call(app, scope, receive, send, **kwargs): op=OP.MIDDLEWARE_STARLETTE, name=middleware_name, origin=StarletteIntegration.origin, + only_if_parent=True, ) as middleware_span: middleware_span.set_tag("starlette.middleware_name", middleware_name) @@ -174,6 +175,7 @@ async def _sentry_receive(*args, **kwargs): op=OP.MIDDLEWARE_STARLETTE_RECEIVE, name=getattr(receive, "__qualname__", str(receive)), origin=StarletteIntegration.origin, + only_if_parent=True, ) as span: span.set_tag("starlette.middleware_name", middleware_name) return await receive(*args, **kwargs) @@ -189,6 +191,7 @@ async def _sentry_send(*args, **kwargs): op=OP.MIDDLEWARE_STARLETTE_SEND, name=getattr(send, "__qualname__", str(send)), origin=StarletteIntegration.origin, + only_if_parent=True, ) as span: span.set_tag("starlette.middleware_name", middleware_name) return await send(*args, **kwargs) diff --git a/sentry_sdk/integrations/starlite.py b/sentry_sdk/integrations/starlite.py index 8714ee2f08..66f5025c26 100644 --- a/sentry_sdk/integrations/starlite.py +++ 
b/sentry_sdk/integrations/starlite.py @@ -140,6 +140,7 @@ async def _create_span_call(self, scope, receive, send): op=OP.MIDDLEWARE_STARLITE, name=middleware_name, origin=StarliteIntegration.origin, + only_if_parent=True, ) as middleware_span: middleware_span.set_tag("starlite.middleware_name", middleware_name) @@ -152,6 +153,7 @@ async def _sentry_receive(*args, **kwargs): op=OP.MIDDLEWARE_STARLITE_RECEIVE, name=getattr(receive, "__qualname__", str(receive)), origin=StarliteIntegration.origin, + only_if_parent=True, ) as span: span.set_tag("starlite.middleware_name", middleware_name) return await receive(*args, **kwargs) @@ -169,6 +171,7 @@ async def _sentry_send(message): op=OP.MIDDLEWARE_STARLITE_SEND, name=getattr(send, "__qualname__", str(send)), origin=StarliteIntegration.origin, + only_if_parent=True, ) as span: span.set_tag("starlite.middleware_name", middleware_name) return await send(message) diff --git a/sentry_sdk/integrations/stdlib.py b/sentry_sdk/integrations/stdlib.py index 9ddfe7468d..424e7b88aa 100644 --- a/sentry_sdk/integrations/stdlib.py +++ b/sentry_sdk/integrations/stdlib.py @@ -94,6 +94,7 @@ def putrequest(self, method, url, *args, **kwargs): name="%s %s" % (method, parsed_url.url if parsed_url else SENSITIVE_DATA_SUBSTITUTE), origin="auto.http.stdlib.httplib", + only_if_parent=True, ) data = { @@ -225,6 +226,7 @@ def sentry_patched_popen_init(self, *a, **kw): op=OP.SUBPROCESS, name=description, origin="auto.subprocess.stdlib.subprocess", + only_if_parent=True, ) as span: for k, v in sentry_sdk.get_current_scope().iter_trace_propagation_headers( span=span @@ -275,6 +277,7 @@ def sentry_patched_popen_wait(self, *a, **kw): with sentry_sdk.start_span( op=OP.SUBPROCESS_WAIT, origin="auto.subprocess.stdlib.subprocess", + only_if_parent=True, ) as span: span.set_tag("subprocess.pid", self.pid) return old_popen_wait(self, *a, **kw) @@ -289,6 +292,7 @@ def sentry_patched_popen_communicate(self, *a, **kw): with sentry_sdk.start_span( op=OP.SUBPROCESS_COMMUNICATE, origin="auto.subprocess.stdlib.subprocess", + only_if_parent=True, ) as span: span.set_tag("subprocess.pid", self.pid) return old_popen_communicate(self, *a, **kw) diff --git a/sentry_sdk/integrations/strawberry.py b/sentry_sdk/integrations/strawberry.py index 636779b61d..bf174e9d99 100644 --- a/sentry_sdk/integrations/strawberry.py +++ b/sentry_sdk/integrations/strawberry.py @@ -191,6 +191,7 @@ def on_operation(self): op=op, name=description, origin=StrawberryIntegration.origin, + only_if_parent=True, ) self.graphql_span.set_data("graphql.operation.type", operation_type) diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index c2bd5734c5..aba7d4f49d 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -10,6 +10,8 @@ format_span_id, Span as OtelSpan, TraceState, + get_current_span, + INVALID_SPAN, ) from opentelemetry.trace.status import StatusCode from opentelemetry.sdk.trace import ReadableSpan @@ -683,9 +685,9 @@ def get_trace_context(self): rv["status"] = self.status if self.containing_transaction: - rv[ - "dynamic_sampling_context" - ] = self.containing_transaction.get_baggage().dynamic_sampling_context() + rv["dynamic_sampling_context"] = ( + self.containing_transaction.get_baggage().dynamic_sampling_context() + ) data = {} @@ -1204,6 +1206,7 @@ def __init__( origin=None, # type: Optional[str] name=None, # type: Optional[str] source=TRANSACTION_SOURCE_CUSTOM, # type: str + only_if_parent=False, # type: bool otel_span=None, # type: Optional[OtelSpan] **_, # type: dict[str, object] ): @@ 
-1214,29 +1217,40 @@ def __init__( listed in the signature. These additional arguments are ignored. If otel_span is passed explicitly, just acts as a proxy. + + If only_if_parent is True, just return an INVALID_SPAN + and avoid instrumentation if there's no active parent span. """ if otel_span is not None: self._otel_span = otel_span else: - from sentry_sdk.integrations.opentelemetry.utils import ( - convert_to_otel_timestamp, + skip_span = ( + only_if_parent and not get_current_span().get_span_context().is_valid ) + if skip_span: + self._otel_span = INVALID_SPAN + else: + from sentry_sdk.integrations.opentelemetry.utils import ( + convert_to_otel_timestamp, + ) - if start_timestamp is not None: - # OTel timestamps have nanosecond precision - start_timestamp = convert_to_otel_timestamp(start_timestamp) + if start_timestamp is not None: + # OTel timestamps have nanosecond precision + start_timestamp = convert_to_otel_timestamp(start_timestamp) - span_name = name or description or op or "" - self._otel_span = tracer.start_span(span_name, start_time=start_timestamp) + span_name = name or description or op or "" + self._otel_span = tracer.start_span( + span_name, start_time=start_timestamp + ) - self.origin = origin or DEFAULT_SPAN_ORIGIN - self.op = op - self.description = description - self.name = span_name - self.source = source + self.origin = origin or DEFAULT_SPAN_ORIGIN + self.op = op + self.description = description + self.name = span_name + self.source = source - if status is not None: - self.set_status(status) + if status is not None: + self.set_status(status) def __eq__(self, other): # type: (POTelSpan) -> bool @@ -1454,7 +1468,7 @@ def start_child(self, **kwargs): # type: (**Any) -> POTelSpan kwargs.setdefault("sampled", self.sampled) - span = POTelSpan(**kwargs) + span = POTelSpan(only_if_parent=True, **kwargs) return span def iter_headers(self): From 0f01912a3420eecbda54ab0a8153ad74b2dce206 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Mon, 11 Nov 2024 14:14:14 +0100 Subject: [PATCH 088/244] Fixed tests --- sentry_sdk/integrations/arq.py | 20 ++++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/sentry_sdk/integrations/arq.py b/sentry_sdk/integrations/arq.py index 345e030fb1..097a89eb22 100644 --- a/sentry_sdk/integrations/arq.py +++ b/sentry_sdk/integrations/arq.py @@ -5,7 +5,7 @@ from sentry_sdk.integrations import DidNotEnable, Integration from sentry_sdk.integrations.logging import ignore_logger from sentry_sdk.scope import should_send_default_pii -from sentry_sdk.tracing import Transaction, TRANSACTION_SOURCE_TASK +from sentry_sdk.tracing import TRANSACTION_SOURCE_TASK from sentry_sdk.utils import ( capture_internal_exceptions, ensure_integration_enabled, @@ -37,6 +37,8 @@ ARQ_CONTROL_FLOW_EXCEPTIONS = (JobExecutionFailed, Retry, RetryJob) +DEFAULT_TRANSACTION_NAME = "unknown arq task" + class ArqIntegration(Integration): identifier = "arq" @@ -101,18 +103,20 @@ async def _sentry_run_job(self, job_id, score): with sentry_sdk.isolation_scope() as scope: scope._name = "arq" + scope.set_transaction_name( + DEFAULT_TRANSACTION_NAME, source=TRANSACTION_SOURCE_TASK, + ) scope.clear_breadcrumbs() - transaction = Transaction( - name="unknown arq task", - status="ok", + with sentry_sdk.start_span( op=OP.QUEUE_TASK_ARQ, + name=DEFAULT_TRANSACTION_NAME, source=TRANSACTION_SOURCE_TASK, origin=ArqIntegration.origin, - ) - - with sentry_sdk.start_transaction(transaction): - return await old_run_job(self, job_id, score) + ) as span: + return_value = await 
old_run_job(self, job_id, score) + span.set_status(SPANSTATUS.OK) + return return_value Worker.run_job = _sentry_run_job From 706300c9d93f7e735f703beedd57c669cb95c6f6 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Mon, 11 Nov 2024 14:22:01 +0100 Subject: [PATCH 089/244] Make it more obvious how otel allows the span status to be set (#3759) --- sentry_sdk/integrations/huey.py | 3 ++- sentry_sdk/tracing.py | 14 +++++++++----- 2 files changed, 11 insertions(+), 6 deletions(-) diff --git a/sentry_sdk/integrations/huey.py b/sentry_sdk/integrations/huey.py index 9a3bfb9b99..433a7c17c6 100644 --- a/sentry_sdk/integrations/huey.py +++ b/sentry_sdk/integrations/huey.py @@ -166,7 +166,8 @@ def _sentry_execute(self, task, timestamp=None): source=TRANSACTION_SOURCE_TASK, origin=HueyIntegration.origin, ) as transaction: + return_value = old_execute(self, task, timestamp) transaction.set_status(SPANSTATUS.OK) - return old_execute(self, task, timestamp) + return return_value Huey._execute = _sentry_execute diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index aba7d4f49d..2bc3c9b8a7 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -1541,13 +1541,17 @@ def set_attribute(self, key, value): def set_status(self, status): # type: (str) -> None if status == SPANSTATUS.OK: - otel_status = StatusCode.OK - otel_description = None + # Do not set status if it's already set. + # We would override an error status with OK. + if self._otel_span.status.status_code == StatusCode.UNSET: + self._otel_span.set_status(StatusCode.OK, None) else: - otel_status = StatusCode.ERROR - otel_description = status + # OpenTelemetry does not allow setting and error status + # if the span is already set to OK + if self._otel_span.status.status_code == StatusCode.OK: + return - self._otel_span.set_status(otel_status, otel_description) + self._otel_span.set_status(StatusCode.ERROR, status) def set_measurement(self, name, value, unit=""): # type: (str, float, MeasurementUnit) -> None From b3084d14b3afac4678b19e4084f516272f09d3fe Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Mon, 11 Nov 2024 16:24:37 +0100 Subject: [PATCH 090/244] Revert tracing set_status change and just set OK properly in huey --- sentry_sdk/integrations/huey.py | 8 ++++---- sentry_sdk/tracing.py | 14 +++++--------- 2 files changed, 9 insertions(+), 13 deletions(-) diff --git a/sentry_sdk/integrations/huey.py b/sentry_sdk/integrations/huey.py index 433a7c17c6..f12c63705b 100644 --- a/sentry_sdk/integrations/huey.py +++ b/sentry_sdk/integrations/huey.py @@ -135,6 +135,8 @@ def _sentry_execute(*args, **kwargs): exc_info = sys.exc_info() _capture_exception(exc_info) reraise(*exc_info) + else: + sentry_sdk.get_current_scope().transaction.set_status(SPANSTATUS.OK) return result @@ -165,9 +167,7 @@ def _sentry_execute(self, task, timestamp=None): op=OP.QUEUE_TASK_HUEY, source=TRANSACTION_SOURCE_TASK, origin=HueyIntegration.origin, - ) as transaction: - return_value = old_execute(self, task, timestamp) - transaction.set_status(SPANSTATUS.OK) - return return_value + ): + return old_execute(self, task, timestamp) Huey._execute = _sentry_execute diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 2bc3c9b8a7..aba7d4f49d 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -1541,17 +1541,13 @@ def set_attribute(self, key, value): def set_status(self, status): # type: (str) -> None if status == SPANSTATUS.OK: - # Do not set status if it's already set. - # We would override an error status with OK. 
- if self._otel_span.status.status_code == StatusCode.UNSET: - self._otel_span.set_status(StatusCode.OK, None) + otel_status = StatusCode.OK + otel_description = None else: - # OpenTelemetry does not allow setting and error status - # if the span is already set to OK - if self._otel_span.status.status_code == StatusCode.OK: - return + otel_status = StatusCode.ERROR + otel_description = status - self._otel_span.set_status(StatusCode.ERROR, status) + self._otel_span.set_status(otel_status, otel_description) def set_measurement(self, name, value, unit=""): # type: (str, float, MeasurementUnit) -> None From fbbf5e76f058ba5f49fe203bf98feed57508d2ca Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Mon, 11 Nov 2024 16:27:37 +0100 Subject: [PATCH 091/244] Fix most starlette tests (#3749) mainly some middleware span reordering --- .../integrations/starlette/test_starlette.py | 36 +++++++++---------- 1 file changed, 17 insertions(+), 19 deletions(-) diff --git a/tests/integrations/starlette/test_starlette.py b/tests/integrations/starlette/test_starlette.py index fd47895f5a..22ed10b7cb 100644 --- a/tests/integrations/starlette/test_starlette.py +++ b/tests/integrations/starlette/test_starlette.py @@ -659,9 +659,9 @@ def test_middleware_spans(sentry_init, capture_events): "AuthenticationMiddleware", "ExceptionMiddleware", "AuthenticationMiddleware", # 'op': 'middleware.starlette.send' - "ServerErrorMiddleware", # 'op': 'middleware.starlette.send' "AuthenticationMiddleware", # 'op': 'middleware.starlette.send' "ServerErrorMiddleware", # 'op': 'middleware.starlette.send' + "ServerErrorMiddleware", # 'op': 'middleware.starlette.send' ] assert len(transaction_event["spans"]) == len(expected_middleware_spans) @@ -736,23 +736,23 @@ def test_middleware_callback_spans(sentry_init, capture_events): }, { "op": "middleware.starlette.send", - "description": "ServerErrorMiddleware.__call__.._send", - "tags": {"starlette.middleware_name": "SampleMiddleware"}, + "description": "SampleMiddleware.__call__..do_stuff", + "tags": {"starlette.middleware_name": "ExceptionMiddleware"}, }, { "op": "middleware.starlette.send", - "description": "SentryAsgiMiddleware._run_app.._sentry_wrapped_send", - "tags": {"starlette.middleware_name": "ServerErrorMiddleware"}, + "description": "ServerErrorMiddleware.__call__.._send", + "tags": {"starlette.middleware_name": "SampleMiddleware"}, }, { "op": "middleware.starlette.send", - "description": "SampleMiddleware.__call__..do_stuff", - "tags": {"starlette.middleware_name": "ExceptionMiddleware"}, + "description": "ServerErrorMiddleware.__call__.._send", + "tags": {"starlette.middleware_name": "SampleMiddleware"}, }, { "op": "middleware.starlette.send", - "description": "ServerErrorMiddleware.__call__.._send", - "tags": {"starlette.middleware_name": "SampleMiddleware"}, + "description": "SentryAsgiMiddleware._run_app.._sentry_wrapped_send", + "tags": {"starlette.middleware_name": "ServerErrorMiddleware"}, }, { "op": "middleware.starlette.send", @@ -830,16 +830,16 @@ def test_middleware_partial_receive_send(sentry_init, capture_events): "description": "ServerErrorMiddleware.__call__.._send", "tags": {"starlette.middleware_name": "SamplePartialReceiveSendMiddleware"}, }, - { - "op": "middleware.starlette.send", - "description": "SentryAsgiMiddleware._run_app.._sentry_wrapped_send", - "tags": {"starlette.middleware_name": "ServerErrorMiddleware"}, - }, { "op": "middleware.starlette", "description": "ExceptionMiddleware", "tags": {"starlette.middleware_name": "ExceptionMiddleware"}, }, + { + 
"op": "middleware.starlette.send", + "description": "SentryAsgiMiddleware._run_app.._sentry_wrapped_send", + "tags": {"starlette.middleware_name": "ServerErrorMiddleware"}, + }, { "op": "middleware.starlette.send", "description": "functools.partial(.my_send at ", @@ -886,13 +886,13 @@ def test_active_thread_id(sentry_init, capture_envelopes, teardown_profiling, en sentry_init( traces_sample_rate=1.0, profiles_sample_rate=1.0, + integrations=[StarletteIntegration()], ) app = starlette_app_factory() - asgi_app = SentryAsgiMiddleware(app) envelopes = capture_envelopes() - client = TestClient(asgi_app) + client = TestClient(app) response = client.get(endpoint) assert response.status_code == 200 @@ -1244,9 +1244,7 @@ def test_transaction_http_method_default(sentry_init, capture_events): """ sentry_init( traces_sample_rate=1.0, - integrations=[ - StarletteIntegration(), - ], + integrations=[StarletteIntegration()], ) events = capture_events() From 665b4a549588a9f1cf369ae492fa79604cd29e65 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Mon, 11 Nov 2024 16:47:05 +0100 Subject: [PATCH 092/244] Fix rq tests (#3762) * Fix transaction name setting and forked some tests to make them work in potel * Transactions in transactins is undefined behavior, so remove this. --- sentry_sdk/integrations/arq.py | 3 ++- sentry_sdk/integrations/rq.py | 19 ++++++++++++------ .../integrations/opentelemetry/test_utils.py | 4 +--- tests/integrations/rq/test_rq.py | 20 +++++++++++-------- 4 files changed, 28 insertions(+), 18 deletions(-) diff --git a/sentry_sdk/integrations/arq.py b/sentry_sdk/integrations/arq.py index 097a89eb22..b0f9a6a443 100644 --- a/sentry_sdk/integrations/arq.py +++ b/sentry_sdk/integrations/arq.py @@ -104,7 +104,8 @@ async def _sentry_run_job(self, job_id, score): with sentry_sdk.isolation_scope() as scope: scope._name = "arq" scope.set_transaction_name( - DEFAULT_TRANSACTION_NAME, source=TRANSACTION_SOURCE_TASK, + DEFAULT_TRANSACTION_NAME, + source=TRANSACTION_SOURCE_TASK, ) scope.clear_breadcrumbs() diff --git a/sentry_sdk/integrations/rq.py b/sentry_sdk/integrations/rq.py index 7e84b15681..7e016bfa9a 100644 --- a/sentry_sdk/integrations/rq.py +++ b/sentry_sdk/integrations/rq.py @@ -32,6 +32,8 @@ from rq.job import Job +DEFAULT_TRANSACTION_NAME = "unknown RQ task" + class RqIntegration(Integration): identifier = "rq" @@ -55,22 +57,27 @@ def setup_once(): def sentry_patched_perform_job(self, job, *args, **kwargs): # type: (Any, Job, *Queue, **Any) -> bool with sentry_sdk.new_scope() as scope: + try: + transaction_name = job.func_name or DEFAULT_TRANSACTION_NAME + except AttributeError: + transaction_name = DEFAULT_TRANSACTION_NAME + + scope.set_transaction_name( + transaction_name, source=TRANSACTION_SOURCE_TASK + ) scope.clear_breadcrumbs() scope.add_event_processor(_make_event_processor(weakref.ref(job))) with sentry_sdk.continue_trace( job.meta.get("_sentry_trace_headers") or {} ): - with sentry_sdk.start_transaction( + with sentry_sdk.start_span( op=OP.QUEUE_TASK_RQ, - name="unknown RQ task", + name=transaction_name, source=TRANSACTION_SOURCE_TASK, origin=RqIntegration.origin, custom_sampling_context={"rq_job": job}, - ) as transaction: - with capture_internal_exceptions(): - transaction.name = job.func_name - + ): rv = old_perform_job(self, job, *args, **kwargs) if self.is_horse: diff --git a/tests/integrations/opentelemetry/test_utils.py b/tests/integrations/opentelemetry/test_utils.py index b6f1072480..fde66bf590 100644 --- a/tests/integrations/opentelemetry/test_utils.py +++ 
b/tests/integrations/opentelemetry/test_utils.py @@ -334,9 +334,7 @@ def test_span_data_for_db_query(): ), ( SpanKind.SERVER, - Status( - StatusCode.ERROR, "I'm a teapot" - ), + Status(StatusCode.ERROR, "I'm a teapot"), { "http.method": "POST", "http.route": "/some/route", diff --git a/tests/integrations/rq/test_rq.py b/tests/integrations/rq/test_rq.py index ffd6f458e1..f8bb8f0fe0 100644 --- a/tests/integrations/rq/test_rq.py +++ b/tests/integrations/rq/test_rq.py @@ -5,7 +5,6 @@ from fakeredis import FakeStrictRedis import sentry_sdk -from sentry_sdk import start_transaction from sentry_sdk.integrations.rq import RqIntegration from sentry_sdk.utils import parse_version @@ -46,6 +45,7 @@ def do_trick(dog, trick): return "{}, can you {}? Good dog!".format(dog, trick) +@pytest.mark.forked def test_basic(sentry_init, capture_events): sentry_init(integrations=[RqIntegration()]) events = capture_events() @@ -78,6 +78,7 @@ def test_basic(sentry_init, capture_events): assert "started_at" in extra +@pytest.mark.forked def test_transport_shutdown(sentry_init, capture_events_forksafe): sentry_init(integrations=[RqIntegration()]) @@ -96,6 +97,7 @@ def test_transport_shutdown(sentry_init, capture_events_forksafe): assert exception["type"] == "ZeroDivisionError" +@pytest.mark.forked def test_transaction_with_error( sentry_init, capture_events, DictionaryContaining # noqa:N803 ): @@ -131,6 +133,7 @@ def test_transaction_with_error( ) +@pytest.mark.forked def test_error_has_trace_context_if_tracing_disabled( sentry_init, capture_events, @@ -149,6 +152,7 @@ def test_error_has_trace_context_if_tracing_disabled( assert error_event["contexts"]["trace"] +@pytest.mark.forked def test_tracing_enabled( sentry_init, capture_events, @@ -159,18 +163,17 @@ def test_tracing_enabled( queue = rq.Queue(connection=FakeStrictRedis()) worker = rq.SimpleWorker([queue], connection=queue.connection) - with start_transaction(op="rq transaction") as transaction: - queue.enqueue(crashing_job, foo=None) - worker.work(burst=True) + queue.enqueue(crashing_job, foo=None) + worker.work(burst=True) - error_event, envelope, _ = events + error_event, transaction = events assert error_event["transaction"] == "tests.integrations.rq.test_rq.crashing_job" - assert error_event["contexts"]["trace"]["trace_id"] == transaction.trace_id - - assert envelope["contexts"]["trace"] == error_event["contexts"]["trace"] + assert transaction["transaction"] == "tests.integrations.rq.test_rq.crashing_job" + assert transaction["contexts"]["trace"] == error_event["contexts"]["trace"] +@pytest.mark.forked def test_tracing_disabled( sentry_init, capture_events, @@ -251,6 +254,7 @@ def test_traces_sampler_gets_correct_values_in_sampling_context( ) +@pytest.mark.forked @pytest.mark.skipif( parse_version(rq.__version__) < (1, 5), reason="At least rq-1.5 required" ) From ebca500bc323bb15d1a1d6e4825b5cedaab38150 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Mon, 11 Nov 2024 16:49:13 +0100 Subject: [PATCH 093/244] Fix django db tests again --- tests/integrations/django/test_basic.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/integrations/django/test_basic.py b/tests/integrations/django/test_basic.py index 2baa51e590..64482a88da 100644 --- a/tests/integrations/django/test_basic.py +++ b/tests/integrations/django/test_basic.py @@ -415,7 +415,7 @@ def test_sql_dict_query_params(sentry_init, capture_events): assert crumb["message"] == ( "SELECT count(*) FROM people_person WHERE foo = %(my_foo)s" ) - assert crumb["data"]["db.params"] 
== '{"my_foo": 10}' + assert crumb["data"]["db.params"] == {"my_foo": 10} @pytest.mark.forked @@ -477,7 +477,7 @@ def test_sql_psycopg2_string_composition(sentry_init, capture_events, query): (event,) = events crumb = event["breadcrumbs"]["values"][-1] assert crumb["message"] == ('SELECT %(my_param)s FROM "foobar"') - assert crumb["data"]["db.params"] == '{"my_param": 10}' + assert crumb["data"]["db.params"] == {"my_param": 10} @pytest.mark.forked @@ -530,7 +530,7 @@ def test_sql_psycopg2_placeholders(sentry_init, capture_events): { "category": "query", "data": { - "db.params": '{"first_var": "fizz", "second_var": "not a date"}', + "db.params": {"first_var": "fizz", "second_var": "not a date"}, "db.paramstyle": "format", }, "message": 'insert into my_test_table ("foo", "bar") values (%(first_var)s, ' From 3f638f7578f2c51701fadd18f1da2b78450ca5df Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Mon, 11 Nov 2024 17:12:52 +0100 Subject: [PATCH 094/244] Reverted test forking --- tests/integrations/rq/test_rq.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/tests/integrations/rq/test_rq.py b/tests/integrations/rq/test_rq.py index f8bb8f0fe0..199441e3e4 100644 --- a/tests/integrations/rq/test_rq.py +++ b/tests/integrations/rq/test_rq.py @@ -45,7 +45,6 @@ def do_trick(dog, trick): return "{}, can you {}? Good dog!".format(dog, trick) -@pytest.mark.forked def test_basic(sentry_init, capture_events): sentry_init(integrations=[RqIntegration()]) events = capture_events() @@ -78,7 +77,6 @@ def test_basic(sentry_init, capture_events): assert "started_at" in extra -@pytest.mark.forked def test_transport_shutdown(sentry_init, capture_events_forksafe): sentry_init(integrations=[RqIntegration()]) @@ -97,7 +95,6 @@ def test_transport_shutdown(sentry_init, capture_events_forksafe): assert exception["type"] == "ZeroDivisionError" -@pytest.mark.forked def test_transaction_with_error( sentry_init, capture_events, DictionaryContaining # noqa:N803 ): @@ -133,7 +130,6 @@ def test_transaction_with_error( ) -@pytest.mark.forked def test_error_has_trace_context_if_tracing_disabled( sentry_init, capture_events, @@ -152,7 +148,6 @@ def test_error_has_trace_context_if_tracing_disabled( assert error_event["contexts"]["trace"] -@pytest.mark.forked def test_tracing_enabled( sentry_init, capture_events, @@ -173,7 +168,6 @@ def test_tracing_enabled( assert transaction["contexts"]["trace"] == error_event["contexts"]["trace"] -@pytest.mark.forked def test_tracing_disabled( sentry_init, capture_events, @@ -254,7 +248,6 @@ def test_traces_sampler_gets_correct_values_in_sampling_context( ) -@pytest.mark.forked @pytest.mark.skipif( parse_version(rq.__version__) < (1, 5), reason="At least rq-1.5 required" ) From afd8cc598b620db0203c7f728563e37d7bb7b50f Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Tue, 12 Nov 2024 15:19:17 +0100 Subject: [PATCH 095/244] Remove custom_sampling_context (#3747) - Add support for the sampled flag for start_span and respect it when making sampling decisions. - Rework sampling_context in traces_sampler to work with span attributes instead. Make sure we still have the same data accessible as now. We could go one step further and change the format of sampling_context to just be the actual span attributes without any postprocessing into the current format. I kept the format in line with what we have now to make it easier to update. See #3746 Closes #3739 This is a breaking change since we're removing custom_sampling_context. It'll break multiple integrations until we fix them (see #3746). 
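For illustration, a rough sketch of how a traces_sampler can consume the
new attribute-based sampling context ("job.queue" is a made-up attribute for
this example; transaction_context and parent_sampled keep their current shape,
and the remaining keys are whatever span attributes are known at span start):

    import sentry_sdk

    def traces_sampler(sampling_context):
        # Defer to an incoming trace's decision if there is one.
        if sampling_context.get("parent_sampled") is not None:
            return float(sampling_context["parent_sampled"])
        # Attributes passed to start_span(attributes=...) are merged in.
        if sampling_context.get("job.queue") == "low-priority":
            return 0.01
        return 0.5

    sentry_sdk.init(traces_sampler=traces_sampler)

    with sentry_sdk.start_span(
        name="process job", attributes={"job.queue": "low-priority"}
    ):
        ...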
--- MIGRATION_GUIDE.md | 2 ++ sentry_sdk/api.py | 15 +++----------- .../integrations/opentelemetry/consts.py | 1 + .../integrations/opentelemetry/sampler.py | 20 ++++++++++++------- .../integrations/opentelemetry/scope.py | 11 +++++----- sentry_sdk/scope.py | 8 +------- sentry_sdk/tracing.py | 17 ++++++++++++++-- tests/tracing/test_sampling.py | 4 ++-- 8 files changed, 42 insertions(+), 36 deletions(-) diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index 495bfff75e..0095cafab7 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -19,11 +19,13 @@ Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of wh - Redis integration: In Redis pipeline spans there is no `span["data"]["redis.commands"]` that contains a dict `{"count": 3, "first_ten": ["cmd1", "cmd2", ...]}` but instead `span["data"]["redis.commands.count"]` (containing `3`) and `span["data"]["redis.commands.first_ten"]` (containing `["cmd1", "cmd2", ...]`). - clickhouse-driver integration: The query is now available under the `db.query.text` span attribute (only if `send_default_pii` is `True`). - `sentry_sdk.init` now returns `None` instead of a context manager. +- The `sampling_context` argument of `traces_sampler` now additionally contains all span attributes known at span start. ### Removed - Spans no longer have a `description`. Use `name` instead. - Dropped support for Python 3.6. +- The `custom_sampling_context` parameter of `start_transaction` has been removed. Use `attributes` instead to set key-value pairs of data that should be accessible in the traces sampler. Note that span attributes need to conform to the [OpenTelemetry specification](https://opentelemetry.io/docs/concepts/signals/traces/#attributes), meaning only certain types can be set as values. - The PyMongo integration no longer sets tags. The data is still accessible via span attributes. - The PyMongo integration doesn't set `operation_ids` anymore. The individual IDs (`operation_id`, `request_id`, `session_id`) are now accessible as separate span attributes. - `sentry_sdk.metrics` and associated metrics APIs have been removed as Sentry no longer accepts metrics data in this form. See https://sentry.zendesk.com/hc/en-us/articles/26369339769883-Upcoming-API-Changes-to-Metrics diff --git a/sentry_sdk/api.py b/sentry_sdk/api.py index 7d06abf660..a44d3f440e 100644 --- a/sentry_sdk/api.py +++ b/sentry_sdk/api.py @@ -40,7 +40,6 @@ ExcInfo, MeasurementUnit, LogLevelStr, - SamplingContext, ) from sentry_sdk.tracing import Span, TransactionKwargs @@ -239,12 +238,8 @@ def flush( return get_client().flush(timeout=timeout, callback=callback) -def start_span( - *, - custom_sampling_context=None, - **kwargs, # type: Any -): - # type: (...) -> POTelSpan +def start_span(**kwargs): + # type: (type.Any) -> POTelSpan """ Start and return a span. @@ -257,13 +252,11 @@ def start_span( of the `with` block. If not using context managers, call the `finish()` method. """ - # TODO: Consider adding type hints to the method signature. - return get_current_scope().start_span(custom_sampling_context, **kwargs) + return get_current_scope().start_span(**kwargs) def start_transaction( transaction=None, # type: Optional[Transaction] - custom_sampling_context=None, # type: Optional[SamplingContext] **kwargs, # type: Unpack[TransactionKwargs] ): # type: (...) -> POTelSpan @@ -295,14 +288,12 @@ def start_transaction( :param transaction: The transaction to start. If omitted, we create and start a new transaction. 
- :param custom_sampling_context: The transaction's custom sampling context. :param kwargs: Optional keyword arguments to be passed to the Transaction constructor. See :py:class:`sentry_sdk.tracing.Transaction` for available arguments. """ return start_span( span=transaction, - custom_sampling_context=custom_sampling_context, **kwargs, ) diff --git a/sentry_sdk/integrations/opentelemetry/consts.py b/sentry_sdk/integrations/opentelemetry/consts.py index 6d7c91f3f1..1585e8d893 100644 --- a/sentry_sdk/integrations/opentelemetry/consts.py +++ b/sentry_sdk/integrations/opentelemetry/consts.py @@ -30,3 +30,4 @@ class SentrySpanAttribute: NAME = "sentry.name" SOURCE = "sentry.source" CONTEXT = "sentry.context" + CUSTOM_SAMPLED = "sentry.custom_sampled" # used for saving start_span(sampled=X) diff --git a/sentry_sdk/integrations/opentelemetry/sampler.py b/sentry_sdk/integrations/opentelemetry/sampler.py index ed8ca36ebd..79e2ec7d8f 100644 --- a/sentry_sdk/integrations/opentelemetry/sampler.py +++ b/sentry_sdk/integrations/opentelemetry/sampler.py @@ -2,7 +2,6 @@ from typing import cast from opentelemetry import trace - from opentelemetry.sdk.trace.sampling import Sampler, SamplingResult, Decision from opentelemetry.trace.span import TraceState @@ -12,6 +11,7 @@ from sentry_sdk.integrations.opentelemetry.consts import ( TRACESTATE_SAMPLED_KEY, TRACESTATE_SAMPLE_RATE_KEY, + SentrySpanAttribute, ) from typing import TYPE_CHECKING @@ -114,28 +114,34 @@ def should_sample( parent_span_context = trace.get_current_span(parent_context).get_span_context() + attributes = attributes or {} + # No tracing enabled, thus no sampling if not has_tracing_enabled(client.options): return dropped_result(parent_span_context, attributes) - sample_rate = None + # Explicit sampled value provided at start_span + if attributes.get(SentrySpanAttribute.CUSTOM_SAMPLED) is not None: + sample_rate = float(attributes[SentrySpanAttribute.CUSTOM_SAMPLED]) + if sample_rate > 0: + return sampled_result(parent_span_context, attributes, sample_rate) + else: + return dropped_result(parent_span_context, attributes) - # Check if sampled=True was passed to start_transaction - # TODO-anton: Do we want to keep the start_transaction(sampled=True) thing? + sample_rate = None # Check if there is a traces_sampler # Traces_sampler is responsible to check parent sampled to have full transactions. 
has_traces_sampler = callable(client.options.get("traces_sampler")) if has_traces_sampler: - # TODO-anton: Make proper sampling_context - # TODO-neel-potel: Make proper sampling_context sampling_context = { "transaction_context": { "name": name, + "op": attributes.get(SentrySpanAttribute.OP), }, "parent_sampled": get_parent_sampled(parent_span_context, trace_id), } - + sampling_context.update(attributes) sample_rate = client.options["traces_sampler"](sampling_context) else: diff --git a/sentry_sdk/integrations/opentelemetry/scope.py b/sentry_sdk/integrations/opentelemetry/scope.py index 0c0087dae1..82828d61de 100644 --- a/sentry_sdk/integrations/opentelemetry/scope.py +++ b/sentry_sdk/integrations/opentelemetry/scope.py @@ -24,7 +24,6 @@ from typing import Tuple, Optional, Generator, Dict, Any from typing_extensions import Unpack - from sentry_sdk._types import SamplingContext from sentry_sdk.tracing import TransactionKwargs @@ -112,17 +111,17 @@ def _incoming_otel_span_context(self): return span_context - def start_transaction(self, custom_sampling_context=None, **kwargs): - # type: (Optional[SamplingContext], Unpack[TransactionKwargs]) -> POTelSpan + def start_transaction(self, **kwargs): + # type: (Unpack[TransactionKwargs]) -> POTelSpan """ .. deprecated:: 3.0.0 This function is deprecated and will be removed in a future release. Use :py:meth:`sentry_sdk.start_span` instead. """ - return self.start_span(custom_sampling_context=custom_sampling_context) + return self.start_span(**kwargs) - def start_span(self, custom_sampling_context=None, **kwargs): - # type: (Optional[SamplingContext], Any) -> POTelSpan + def start_span(self, **kwargs): + # type: (Any) -> POTelSpan return POTelSpan(**kwargs, scope=self) diff --git a/sentry_sdk/scope.py b/sentry_sdk/scope.py index 983b38bf2c..2a6700b178 100644 --- a/sentry_sdk/scope.py +++ b/sentry_sdk/scope.py @@ -946,9 +946,7 @@ def add_breadcrumb(self, crumb=None, hint=None, **kwargs): while len(self._breadcrumbs) > max_breadcrumbs: self._breadcrumbs.popleft() - def start_transaction( - self, transaction=None, custom_sampling_context=None, **kwargs - ): + def start_transaction(self, transaction=None, **kwargs): # type: (Optional[Transaction], Optional[SamplingContext], Unpack[TransactionKwargs]) -> Union[Transaction, NoOpSpan] """ Start and return a transaction. @@ -974,7 +972,6 @@ def start_transaction( :param transaction: The transaction to start. If omitted, we create and start a new transaction. - :param custom_sampling_context: The transaction's custom sampling context. :param kwargs: Optional keyword arguments to be passed to the Transaction constructor. See :py:class:`sentry_sdk.tracing.Transaction` for available arguments. 
@@ -985,8 +982,6 @@ def start_transaction( try_autostart_continuous_profiler() - custom_sampling_context = custom_sampling_context or {} - # if we haven't been given a transaction, make one transaction = Transaction(**kwargs) @@ -996,7 +991,6 @@ def start_transaction( "transaction_context": transaction.to_json(), "parent_sampled": transaction.parent_sampled, } - sampling_context.update(custom_sampling_context) transaction._set_initial_sampling_decision(sampling_context=sampling_context) if transaction.sampled: diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index aba7d4f49d..70744d2d71 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -43,6 +43,8 @@ from typing_extensions import TypedDict, Unpack + from opentelemetry.utils import types as OTelSpanAttributes + P = ParamSpec("P") R = TypeVar("R") @@ -1202,10 +1204,12 @@ def __init__( op=None, # type: Optional[str] description=None, # type: Optional[str] status=None, # type: Optional[str] + sampled=None, # type: Optional[bool] start_timestamp=None, # type: Optional[Union[datetime, float]] origin=None, # type: Optional[str] name=None, # type: Optional[str] source=TRANSACTION_SOURCE_CUSTOM, # type: str + attributes=None, # type: OTelSpanAttributes only_if_parent=False, # type: bool otel_span=None, # type: Optional[OtelSpan] **_, # type: dict[str, object] @@ -1230,6 +1234,9 @@ def __init__( if skip_span: self._otel_span = INVALID_SPAN else: + from sentry_sdk.integrations.opentelemetry.consts import ( + SentrySpanAttribute, + ) from sentry_sdk.integrations.opentelemetry.utils import ( convert_to_otel_timestamp, ) @@ -1239,12 +1246,18 @@ def __init__( start_timestamp = convert_to_otel_timestamp(start_timestamp) span_name = name or description or op or "" + + # Prepopulate some attrs so that they're accessible in traces_sampler + attributes = attributes or {} + attributes[SentrySpanAttribute.OP] = op + if sampled is not None: + attributes[SentrySpanAttribute.CUSTOM_SAMPLED] = sampled + self._otel_span = tracer.start_span( - span_name, start_time=start_timestamp + span_name, start_time=start_timestamp, attributes=attributes ) self.origin = origin or DEFAULT_SPAN_ORIGIN - self.op = op self.description = description self.name = span_name self.source = source diff --git a/tests/tracing/test_sampling.py b/tests/tracing/test_sampling.py index 2e6ed0dab3..70baacc951 100644 --- a/tests/tracing/test_sampling.py +++ b/tests/tracing/test_sampling.py @@ -211,13 +211,13 @@ def test_passes_parent_sampling_decision_in_sampling_context( assert sampling_context["parent_sampled"]._mock_wraps is parent_sampling_decision -def test_passes_custom_samling_context_from_start_transaction_to_traces_sampler( +def test_passes_attributes_from_start_span_to_traces_sampler( sentry_init, DictionaryContaining # noqa: N803 ): traces_sampler = mock.Mock() sentry_init(traces_sampler=traces_sampler) - start_transaction(custom_sampling_context={"dogs": "yes", "cats": "maybe"}) + start_transaction(attributes={"dogs": "yes", "cats": "maybe"}) traces_sampler.assert_any_call( DictionaryContaining({"dogs": "yes", "cats": "maybe"}) From 766f4c5b3c1946129395eddfe5458926883515e7 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Tue, 12 Nov 2024 16:08:08 +0100 Subject: [PATCH 096/244] Fix leftover starlette tests --- tests/integrations/starlette/test_starlette.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/integrations/starlette/test_starlette.py b/tests/integrations/starlette/test_starlette.py index 22ed10b7cb..a45c900f12 100644 --- 
a/tests/integrations/starlette/test_starlette.py +++ b/tests/integrations/starlette/test_starlette.py @@ -1244,6 +1244,7 @@ def test_transaction_http_method_default(sentry_init, capture_events): """ sentry_init( traces_sample_rate=1.0, + auto_enabling_integrations=False, # Make sure that httpx integration is not added, because it adds tracing information to the starlette test clients request. integrations=[StarletteIntegration()], ) events = capture_events() @@ -1269,6 +1270,7 @@ def test_transaction_http_method_default(sentry_init, capture_events): def test_transaction_http_method_custom(sentry_init, capture_events): sentry_init( traces_sample_rate=1.0, + auto_enabling_integrations=False, # Make sure that httpx integration is not added, because it adds tracing information to the starlette test clients request. integrations=[ StarletteIntegration( http_methods_to_capture=( From fccf50bb406d5be14305067564a7afdaa45295c0 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Tue, 12 Nov 2024 16:51:57 +0100 Subject: [PATCH 097/244] Cleanup span is not None checks (#3765) --- .../integrations/opentelemetry/scope.py | 13 ++++++++++-- sentry_sdk/integrations/rq.py | 8 +++----- sentry_sdk/scope.py | 20 +++++++++++++++---- sentry_sdk/tracing.py | 5 +++++ tests/conftest.py | 3 +-- 5 files changed, 36 insertions(+), 13 deletions(-) diff --git a/sentry_sdk/integrations/opentelemetry/scope.py b/sentry_sdk/integrations/opentelemetry/scope.py index 82828d61de..56df9a774a 100644 --- a/sentry_sdk/integrations/opentelemetry/scope.py +++ b/sentry_sdk/integrations/opentelemetry/scope.py @@ -125,8 +125,17 @@ def start_span(self, **kwargs): return POTelSpan(**kwargs, scope=self) -_INITIAL_CURRENT_SCOPE = PotelScope(ty=ScopeType.CURRENT) -_INITIAL_ISOLATION_SCOPE = PotelScope(ty=ScopeType.ISOLATION) +_INITIAL_CURRENT_SCOPE = None +_INITIAL_ISOLATION_SCOPE = None + + +def _setup_initial_scopes(): + global _INITIAL_CURRENT_SCOPE, _INITIAL_ISOLATION_SCOPE + _INITIAL_CURRENT_SCOPE = PotelScope(ty=ScopeType.CURRENT) + _INITIAL_ISOLATION_SCOPE = PotelScope(ty=ScopeType.ISOLATION) + + +_setup_initial_scopes() @contextmanager diff --git a/sentry_sdk/integrations/rq.py b/sentry_sdk/integrations/rq.py index 7e016bfa9a..06eebd9f94 100644 --- a/sentry_sdk/integrations/rq.py +++ b/sentry_sdk/integrations/rq.py @@ -108,11 +108,9 @@ def sentry_patched_handle_exception(self, job, *exc_info, **kwargs): @ensure_integration_enabled(RqIntegration, old_enqueue_job) def sentry_patched_enqueue_job(self, job, **kwargs): # type: (Queue, Any, **Any) -> Any - scope = sentry_sdk.get_current_scope() - if scope.span is not None: - job.meta["_sentry_trace_headers"] = dict( - scope.iter_trace_propagation_headers() - ) + job.meta["_sentry_trace_headers"] = dict( + sentry_sdk.get_current_scope().iter_trace_propagation_headers() + ) return old_enqueue_job(self, job, **kwargs) diff --git a/sentry_sdk/scope.py b/sentry_sdk/scope.py index 2a6700b178..48b8571b98 100644 --- a/sentry_sdk/scope.py +++ b/sentry_sdk/scope.py @@ -497,7 +497,11 @@ def get_traceparent(self, *args, **kwargs): client = self.get_client() # If we have an active span, return traceparent from there - if has_tracing_enabled(client.options) and self.span is not None: + if ( + has_tracing_enabled(client.options) + and self.span is not None + and self.span.is_valid + ): return self.span.to_traceparent() # If this scope has a propagation context, return traceparent from there @@ -521,7 +525,11 @@ def get_baggage(self, *args, **kwargs): client = self.get_client() # If we have an active span, 
return baggage from there - if has_tracing_enabled(client.options) and self.span is not None: + if ( + has_tracing_enabled(client.options) + and self.span is not None + and self.span.is_valid + ): return self.span.to_baggage() # If this scope has a propagation context, return baggage from there @@ -610,7 +618,7 @@ def iter_trace_propagation_headers(self, *args, **kwargs): span = kwargs.pop("span", None) span = span or self.span - if has_tracing_enabled(client.options) and span is not None: + if has_tracing_enabled(client.options) and span is not None and span.is_valid: for header in span.iter_headers(): yield header else: @@ -1311,7 +1319,11 @@ def _apply_contexts_to_event(self, event, hint, options): # Add "trace" context if contexts.get("trace") is None: - if has_tracing_enabled(options) and self._span is not None: + if ( + has_tracing_enabled(options) + and self._span is not None + and self._span.is_valid + ): contexts["trace"] = self._span.get_trace_context() else: contexts["trace"] = self.get_trace_context() diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 70744d2d71..a69a6f98be 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -1389,6 +1389,11 @@ def span_id(self): # type: () -> str return format_span_id(self._otel_span.get_span_context().span_id) + @property + def is_valid(self): + # type: () -> bool + return self._otel_span.get_span_context().is_valid + @property def sampled(self): # type: () -> Optional[bool] diff --git a/tests/conftest.py b/tests/conftest.py index c7ade0bcdc..94fdf55707 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -74,8 +74,7 @@ def clean_scopes(): scope._isolation_scope.set(None) scope._current_scope.set(None) - potel_scope._INITIAL_CURRENT_SCOPE.clear() - potel_scope._INITIAL_ISOLATION_SCOPE.clear() + potel_scope._setup_initial_scopes() @pytest.fixture(autouse=True) From 09b4a016ed133e8f34ccece544045e8c11bf80e7 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Wed, 13 Nov 2024 15:10:52 +0100 Subject: [PATCH 098/244] Fix aiohttp tests (#3771) Remove explicit trace_id passing --- MIGRATION_GUIDE.md | 1 + tests/integrations/aiohttp/test_aiohttp.py | 17 +++++++++++------ 2 files changed, 12 insertions(+), 6 deletions(-) diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index 0095cafab7..da1965f757 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -11,6 +11,7 @@ Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of wh - The SDK now supports Python 3.7 and higher. - `sentry_sdk.start_span` now only takes keyword arguments. - `sentry_sdk.start_span` no longer takes an explicit `span` argument. +- `sentry_sdk.start_span` no longer takes explicit `trace_id`, `span_id` or `parent_span_id` arguments. - The `Span()` constructor does not accept a `hub` parameter anymore. - `Span.finish()` does not accept a `hub` parameter anymore. - The `Profile()` constructor does not accept a `hub` parameter anymore. 
diff --git a/tests/integrations/aiohttp/test_aiohttp.py b/tests/integrations/aiohttp/test_aiohttp.py index bafb639c34..432427b08e 100644 --- a/tests/integrations/aiohttp/test_aiohttp.py +++ b/tests/integrations/aiohttp/test_aiohttp.py @@ -535,8 +535,6 @@ async def handler(request): with start_transaction( name="/interactions/other-dogs/new-dog", op="greeting.sniff", - # make trace_id difference between transactions - trace_id="0123456789012345678901234567890", ) as transaction: client = await aiohttp_client(raw_server) resp = await client.get("/") @@ -572,14 +570,21 @@ async def handler(request): with start_transaction( name="/interactions/other-dogs/new-dog", op="greeting.sniff", - trace_id="0123456789012345678901234567890", - ): + ) as transaction: client = await aiohttp_client(raw_server) resp = await client.get("/", headers={"bagGage": "custom=value"}) assert ( - resp.request_info.headers["baggage"] - == "custom=value,sentry-trace_id=0123456789012345678901234567890,sentry-environment=production,sentry-release=d08ebdb9309e1b004c6f52202de58a09c2268e42,sentry-transaction=/interactions/other-dogs/new-dog,sentry-sample_rate=1.0,sentry-sampled=true" + sorted(resp.request_info.headers["baggage"].split(",")) + == sorted([ + "custom=value", + f"sentry-trace_id={transaction.trace_id}", + "sentry-environment=production", + "sentry-release=d08ebdb9309e1b004c6f52202de58a09c2268e42", + "sentry-transaction=/interactions/other-dogs/new-dog", + "sentry-sample_rate=1.0", + "sentry-sampled=true", + ]) ) From ff7e1342d854415698d3cb6d5be13eb5e488e64a Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Thu, 14 Nov 2024 11:58:36 +0100 Subject: [PATCH 099/244] Sample on root span level (#3767) --- MIGRATION_GUIDE.md | 3 +- .../integrations/opentelemetry/sampler.py | 22 +++-- tests/tracing/test_sampling.py | 81 ++++++++++--------- 3 files changed, 59 insertions(+), 47 deletions(-) diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index da1965f757..cb81d79fc5 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -10,8 +10,7 @@ Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of wh - The SDK now supports Python 3.7 and higher. - `sentry_sdk.start_span` now only takes keyword arguments. -- `sentry_sdk.start_span` no longer takes an explicit `span` argument. -- `sentry_sdk.start_span` no longer takes explicit `trace_id`, `span_id` or `parent_span_id` arguments. +- `sentry_sdk.start_transaction`/`sentry_sdk.start_span` no longer takes the following arguments: `span`, `parent_sampled`, `trace_id`, `span_id` or `parent_span_id`. - The `Span()` constructor does not accept a `hub` parameter anymore. - `Span.finish()` does not accept a `hub` parameter anymore. - The `Profile()` constructor does not accept a `hub` parameter anymore. 
diff --git a/sentry_sdk/integrations/opentelemetry/sampler.py b/sentry_sdk/integrations/opentelemetry/sampler.py index 79e2ec7d8f..cb722694ac 100644 --- a/sentry_sdk/integrations/opentelemetry/sampler.py +++ b/sentry_sdk/integrations/opentelemetry/sampler.py @@ -120,20 +120,29 @@ def should_sample( if not has_tracing_enabled(client.options): return dropped_result(parent_span_context, attributes) + # parent_span_context.is_valid means this span has a parent, remote or local + is_root_span = not parent_span_context.is_valid or parent_span_context.is_remote + # Explicit sampled value provided at start_span if attributes.get(SentrySpanAttribute.CUSTOM_SAMPLED) is not None: - sample_rate = float(attributes[SentrySpanAttribute.CUSTOM_SAMPLED]) - if sample_rate > 0: - return sampled_result(parent_span_context, attributes, sample_rate) + if is_root_span: + sample_rate = float(attributes[SentrySpanAttribute.CUSTOM_SAMPLED]) + if sample_rate > 0: + return sampled_result(parent_span_context, attributes, sample_rate) + else: + return dropped_result(parent_span_context, attributes) else: - return dropped_result(parent_span_context, attributes) + logger.debug( + f"[Tracing] Ignoring sampled param for non-root span {name}" + ) sample_rate = None # Check if there is a traces_sampler # Traces_sampler is responsible to check parent sampled to have full transactions. has_traces_sampler = callable(client.options.get("traces_sampler")) - if has_traces_sampler: + + if is_root_span and has_traces_sampler: sampling_context = { "transaction_context": { "name": name, @@ -161,8 +170,7 @@ def should_sample( return dropped_result(parent_span_context, attributes) # Down-sample in case of back pressure monitor says so - # TODO: this should only be done for transactions (aka root spans) - if client.monitor: + if is_root_span and client.monitor: sample_rate /= 2**client.monitor.downsample_factor # Roll the dice on sample rate diff --git a/tests/tracing/test_sampling.py b/tests/tracing/test_sampling.py index 70baacc951..8ef362a1e8 100644 --- a/tests/tracing/test_sampling.py +++ b/tests/tracing/test_sampling.py @@ -6,34 +6,33 @@ import sentry_sdk from sentry_sdk import start_span, start_transaction, capture_exception -from sentry_sdk.tracing import Transaction from sentry_sdk.utils import logger -def test_sampling_decided_only_for_transactions(sentry_init, capture_events): +def test_sampling_decided_only_for_root_spans(sentry_init): sentry_init(traces_sample_rate=0.5) - with start_transaction(name="hi") as transaction: - assert transaction.sampled is not None + with start_span(name="outer1") as root_span1: + assert root_span1.sampled is not None - with start_span() as span: - assert span.sampled == transaction.sampled + with start_span(name="inner") as span: + assert span.sampled == root_span1.sampled - with start_span() as span: - assert span.sampled is None + with start_span(name="outer2") as root_span2: + assert root_span2.sampled is not None @pytest.mark.parametrize("sampled", [True, False]) -def test_nested_transaction_sampling_override(sentry_init, sampled): +def test_nested_span_sampling_override(sentry_init, sampled): sentry_init(traces_sample_rate=1.0) - with start_transaction(name="outer", sampled=sampled) as outer_transaction: - assert outer_transaction.sampled is sampled - with start_transaction( - name="inner", sampled=(not sampled) - ) as inner_transaction: - assert inner_transaction.sampled is not sampled - assert outer_transaction.sampled is sampled + with start_span(name="outer", sampled=sampled) as 
outer_span: + assert outer_span.sampled is sampled + with start_span(name="inner", sampled=(not sampled)) as inner_span: + # won't work because the child span inherits the sampling decision + # from the parent + assert inner_span.sampled is sampled + assert outer_span.sampled is sampled def test_no_double_sampling(sentry_init, capture_events): @@ -147,10 +146,17 @@ def test_ignores_inherited_sample_decision_when_traces_sampler_defined( traces_sampler = mock.Mock(return_value=not parent_sampling_decision) sentry_init(traces_sampler=traces_sampler) - transaction = start_transaction( - name="dogpark", parent_sampled=parent_sampling_decision + sentry_trace_header = ( + "12312012123120121231201212312012-1121201211212012-{sampled}".format( + sampled=int(parent_sampling_decision) + ) ) - assert transaction.sampled is not parent_sampling_decision + + with sentry_sdk.continue_trace({"sentry-trace": sentry_trace_header}): + with sentry_sdk.start_span(name="dogpark") as span: + pass + + assert span.sampled is not parent_sampling_decision @pytest.mark.parametrize("explicit_decision", [True, False]) @@ -176,18 +182,28 @@ def test_inherits_parent_sampling_decision_when_traces_sampler_undefined( sentry_init(traces_sample_rate=0.5) mock_random_value = 0.25 if parent_sampling_decision is False else 0.75 - with mock.patch.object(random, "random", return_value=mock_random_value): - transaction = start_transaction( - name="dogpark", parent_sampled=parent_sampling_decision + sentry_trace_header = ( + "12312012123120121231201212312012-1121201211212012-{sampled}".format( + sampled=int(parent_sampling_decision) ) - assert transaction.sampled is parent_sampling_decision + ) + with mock.patch.object(random, "random", return_value=mock_random_value): + with sentry_sdk.continue_trace({"sentry-trace": sentry_trace_header}): + with start_span(name="dogpark") as span: + pass + + assert span.sampled is parent_sampling_decision @pytest.mark.parametrize("parent_sampling_decision", [True, False]) def test_passes_parent_sampling_decision_in_sampling_context( sentry_init, parent_sampling_decision ): - sentry_init(traces_sample_rate=1.0) + def dummy_traces_sampler(sampling_context): + assert sampling_context["parent_sampled"] is parent_sampling_decision + return 1.0 + + sentry_init(traces_sample_rate=1.0, traces_sampler=dummy_traces_sampler) sentry_trace_header = ( "12312012123120121231201212312012-1121201211212012-{sampled}".format( @@ -195,20 +211,9 @@ def test_passes_parent_sampling_decision_in_sampling_context( ) ) - transaction = Transaction.continue_from_headers( - headers={"sentry-trace": sentry_trace_header}, name="dogpark" - ) - spy = mock.Mock(wraps=transaction) - start_transaction(transaction=spy) - - # there's only one call (so index at 0) and kwargs are always last in a call - # tuple (so index at -1) - sampling_context = spy._set_initial_sampling_decision.mock_calls[0][-1][ - "sampling_context" - ] - assert "parent_sampled" in sampling_context - # because we passed in a spy, attribute access requires unwrapping - assert sampling_context["parent_sampled"]._mock_wraps is parent_sampling_decision + with sentry_sdk.continue_trace({"sentry-trace": sentry_trace_header}): + with sentry_sdk.start_span(name="dogpark"): + pass def test_passes_attributes_from_start_span_to_traces_sampler( From 13ec94fa1d4e38c11a933dc88f21056dff062843 Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Thu, 14 Nov 2024 12:18:55 +0100 Subject: [PATCH 100/244] Unpack custom_sampling_context into attributes in ASGI (#3764) --- MIGRATION_GUIDE.md 
| 1 + sentry_sdk/integrations/asgi.py | 15 ++++++++++++++- .../integrations/opentelemetry/sampler.py | 1 + sentry_sdk/tracing.py | 2 +- tests/integrations/asgi/test_asgi.py | 17 +++++++++++++++++ 5 files changed, 34 insertions(+), 2 deletions(-) diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index cb81d79fc5..8d8ee0d682 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -20,6 +20,7 @@ Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of wh - clickhouse-driver integration: The query is now available under the `db.query.text` span attribute (only if `send_default_pii` is `True`). - `sentry_sdk.init` now returns `None` instead of a context manager. - The `sampling_context` argument of `traces_sampler` now additionally contains all span attributes known at span start. +- The `sampling_context` argument of `traces_sampler` doesn't contain the `asgi_scope` object anymore for ASGI frameworks. Instead, the individual properties, if available, are accessible as `asgi_scope.endpoint`, `asgi_scope.path`, `asgi_scope.root_path`, `asgi_scope.route`, `asgi_scope.scheme`, `asgi_scope.server` and `asgi_scope.type`. ### Removed diff --git a/sentry_sdk/integrations/asgi.py b/sentry_sdk/integrations/asgi.py index f67c47ef02..73801ed102 100644 --- a/sentry_sdk/integrations/asgi.py +++ b/sentry_sdk/integrations/asgi.py @@ -209,7 +209,7 @@ async def _run_app(self, scope, receive, send, asgi_version): name=transaction_name, source=transaction_source, origin=self.span_origin, - custom_sampling_context={"asgi_scope": scope}, + attributes=_prepopulate_attributes(scope), ) if should_trace else nullcontext() @@ -324,3 +324,16 @@ def _get_transaction_name_and_source(self, transaction_style, asgi_scope): return name, source return name, source + + +def _prepopulate_attributes(scope): + # type: (Any) -> dict[str, Any] + """Unpack asgi_scope into serializable attributes.""" + scope = scope or {} + + attributes = {} + for attr in ("endpoint", "path", "root_path", "route", "scheme", "server", "type"): + if scope.get(attr): + attributes[f"asgi_scope.{attr}"] = scope[attr] + + return attributes diff --git a/sentry_sdk/integrations/opentelemetry/sampler.py b/sentry_sdk/integrations/opentelemetry/sampler.py index cb722694ac..302b66aaaa 100644 --- a/sentry_sdk/integrations/opentelemetry/sampler.py +++ b/sentry_sdk/integrations/opentelemetry/sampler.py @@ -147,6 +147,7 @@ def should_sample( "transaction_context": { "name": name, "op": attributes.get(SentrySpanAttribute.OP), + "source": attributes.get(SentrySpanAttribute.SOURCE), }, "parent_sampled": get_parent_sampled(parent_span_context, trace_id), } diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index a69a6f98be..a571f3f84c 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -1250,6 +1250,7 @@ def __init__( # Prepopulate some attrs so that they're accessible in traces_sampler attributes = attributes or {} attributes[SentrySpanAttribute.OP] = op + attributes[SentrySpanAttribute.SOURCE] = source if sampled is not None: attributes[SentrySpanAttribute.CUSTOM_SAMPLED] = sampled @@ -1260,7 +1261,6 @@ def __init__( self.origin = origin or DEFAULT_SPAN_ORIGIN self.description = description self.name = span_name - self.source = source if status is not None: self.set_status(status) diff --git a/tests/integrations/asgi/test_asgi.py b/tests/integrations/asgi/test_asgi.py index e0a3900a38..fb97c385a0 100644 --- a/tests/integrations/asgi/test_asgi.py +++ b/tests/integrations/asgi/test_asgi.py @@ -721,3 +721,20 @@ async def 
test_custom_transaction_name( assert transaction_event["type"] == "transaction" assert transaction_event["transaction"] == "foobar" assert transaction_event["transaction_info"] == {"source": "custom"} + + +@pytest.mark.asyncio +async def test_asgi_scope_in_traces_sampler(sentry_init, asgi3_app): + def dummy_traces_sampler(sampling_context): + assert sampling_context["asgi_scope.path"] == "/test" + assert sampling_context["asgi_scope.scheme"] == "http" + + sentry_init( + traces_sampler=dummy_traces_sampler, + traces_sample_rate=1.0, + ) + + app = SentryAsgiMiddleware(asgi3_app) + + async with TestClient(app) as client: + await client.get("/test") From 6c6ac096203cac0888efc5e91a2badb7e1120fef Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Thu, 14 Nov 2024 12:31:04 +0100 Subject: [PATCH 101/244] Use semantic attributes in traces sampler for ASGI spans (#3774) --- MIGRATION_GUIDE.md | 13 +++++- sentry_sdk/integrations/_asgi_common.py | 4 +- sentry_sdk/integrations/asgi.py | 48 +++++++++++++++++----- tests/integrations/asgi/test_asgi.py | 10 +++-- tests/integrations/fastapi/test_fastapi.py | 3 -- 5 files changed, 58 insertions(+), 20 deletions(-) diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index 8d8ee0d682..168de1cfcb 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -20,7 +20,18 @@ Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of wh - clickhouse-driver integration: The query is now available under the `db.query.text` span attribute (only if `send_default_pii` is `True`). - `sentry_sdk.init` now returns `None` instead of a context manager. - The `sampling_context` argument of `traces_sampler` now additionally contains all span attributes known at span start. -- The `sampling_context` argument of `traces_sampler` doesn't contain the `asgi_scope` object anymore for ASGI frameworks. Instead, the individual properties, if available, are accessible as `asgi_scope.endpoint`, `asgi_scope.path`, `asgi_scope.root_path`, `asgi_scope.route`, `asgi_scope.scheme`, `asgi_scope.server` and `asgi_scope.type`. +- The `sampling_context` argument of `traces_sampler` doesn't contain the `asgi_scope` object anymore for ASGI frameworks. Instead, the individual properties on the scope, if available, are accessible as follows: + + | Scope property | Sampling context key(s) | + | -------------- | ------------------------------- | + | `type` | `network.protocol.name` | + | `scheme` | `url.scheme` | + | `path` | `url.path` | + | `http_version` | `network.protocol.version` | + | `method` | `http.request.method` | + | `server` | `server.address`, `server.port` | + | `client` | `client.address`, `client.port` | + | full URL | `url.full` | ### Removed diff --git a/sentry_sdk/integrations/_asgi_common.py b/sentry_sdk/integrations/_asgi_common.py index ca030d6f45..52ecdbfd58 100644 --- a/sentry_sdk/integrations/_asgi_common.py +++ b/sentry_sdk/integrations/_asgi_common.py @@ -32,8 +32,8 @@ def _get_headers(asgi_scope): return headers -def _get_url(asgi_scope, default_scheme, host): - # type: (Dict[str, Any], Literal["ws", "http"], Optional[Union[AnnotatedValue, str]]) -> str +def _get_url(asgi_scope, default_scheme=None, host=None): + # type: (Dict[str, Any], Optional[Literal["ws", "http"]], Optional[Union[AnnotatedValue, str]]) -> str """ Extract URL from the ASGI scope, without also including the querystring. 
""" diff --git a/sentry_sdk/integrations/asgi.py b/sentry_sdk/integrations/asgi.py index 73801ed102..b2ecfe23b7 100644 --- a/sentry_sdk/integrations/asgi.py +++ b/sentry_sdk/integrations/asgi.py @@ -15,6 +15,7 @@ from sentry_sdk.integrations._asgi_common import ( _get_headers, + _get_query, _get_request_data, _get_url, ) @@ -57,6 +58,14 @@ TRANSACTION_STYLE_VALUES = ("endpoint", "url") +ASGI_SCOPE_PROPERTY_TO_ATTRIBUTE = { + "http_version": "network.protocol.version", + "method": "http.request.method", + "path": "url.path", + "scheme": "url.scheme", + "type": "network.protocol.name", +} + def _capture_exception(exc, mechanism_type="asgi"): # type: (Any, str) -> None @@ -213,23 +222,21 @@ async def _run_app(self, scope, receive, send, asgi_version): ) if should_trace else nullcontext() - ) as transaction: - if transaction is not None: - logger.debug( - "[ASGI] Started transaction: %s", transaction - ) - transaction.set_tag("asgi.type", ty) + ) as span: + if span is not None: + logger.debug("[ASGI] Started transaction: %s", span) + span.set_tag("asgi.type", ty) try: async def _sentry_wrapped_send(event): # type: (Dict[str, Any]) -> Any is_http_response = ( event.get("type") == "http.response.start" - and transaction is not None + and span is not None and "status" in event ) if is_http_response: - transaction.set_http_status(event["status"]) + span.set_http_status(event["status"]) return await send(event) @@ -328,12 +335,31 @@ def _get_transaction_name_and_source(self, transaction_style, asgi_scope): def _prepopulate_attributes(scope): # type: (Any) -> dict[str, Any] - """Unpack asgi_scope into serializable attributes.""" + """Unpack ASGI scope into serializable OTel attributes.""" scope = scope or {} attributes = {} - for attr in ("endpoint", "path", "root_path", "route", "scheme", "server", "type"): + for attr, key in ASGI_SCOPE_PROPERTY_TO_ATTRIBUTE.items(): if scope.get(attr): - attributes[f"asgi_scope.{attr}"] = scope[attr] + attributes[key] = scope[attr] + + for attr in ("client", "server"): + if scope.get(attr): + try: + host, port = scope[attr] + attributes[f"{attr}.address"] = host + attributes[f"{attr}.port"] = port + except Exception: + pass + + try: + full_url = _get_url(scope) + query = _get_query(scope) + if query: + full_url = f"{full_url}?{query}" + + attributes["url.full"] = full_url + except Exception: + pass return attributes diff --git a/tests/integrations/asgi/test_asgi.py b/tests/integrations/asgi/test_asgi.py index fb97c385a0..74f6d8cc49 100644 --- a/tests/integrations/asgi/test_asgi.py +++ b/tests/integrations/asgi/test_asgi.py @@ -726,8 +726,12 @@ async def test_custom_transaction_name( @pytest.mark.asyncio async def test_asgi_scope_in_traces_sampler(sentry_init, asgi3_app): def dummy_traces_sampler(sampling_context): - assert sampling_context["asgi_scope.path"] == "/test" - assert sampling_context["asgi_scope.scheme"] == "http" + assert sampling_context["url.path"] == "/test" + assert sampling_context["url.scheme"] == "http" + assert sampling_context["url.full"] == "/test?hello=there" + assert sampling_context["http.request.method"] == "GET" + assert sampling_context["network.protocol.version"] == "1.1" + assert sampling_context["network.protocol.name"] == "http" sentry_init( traces_sampler=dummy_traces_sampler, @@ -737,4 +741,4 @@ def dummy_traces_sampler(sampling_context): app = SentryAsgiMiddleware(asgi3_app) async with TestClient(app) as client: - await client.get("/test") + await client.get("/test?hello=there") diff --git 
a/tests/integrations/fastapi/test_fastapi.py b/tests/integrations/fastapi/test_fastapi.py index 97aea06344..b425ceebe6 100644 --- a/tests/integrations/fastapi/test_fastapi.py +++ b/tests/integrations/fastapi/test_fastapi.py @@ -246,7 +246,6 @@ async def _error(request: Request): assert event["request"]["headers"]["authorization"] == "[Filtered]" -@pytest.mark.asyncio def test_response_status_code_ok_in_transaction_context(sentry_init, capture_envelopes): """ Tests that the response status code is added to the transaction "response" context. @@ -275,7 +274,6 @@ def test_response_status_code_ok_in_transaction_context(sentry_init, capture_env assert transaction["contexts"]["response"]["status_code"] == 200 -@pytest.mark.asyncio def test_response_status_code_error_in_transaction_context( sentry_init, capture_envelopes, @@ -312,7 +310,6 @@ def test_response_status_code_error_in_transaction_context( assert transaction["contexts"]["response"]["status_code"] == 500 -@pytest.mark.asyncio def test_response_status_code_not_found_in_transaction_context( sentry_init, capture_envelopes, From 0011e22bac179736fa6beacf96b0cba439a8d2bf Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Thu, 14 Nov 2024 15:46:18 +0100 Subject: [PATCH 102/244] Fix remote case for only_if_parent (#3777) --- sentry_sdk/tracing.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index a571f3f84c..6d24f4d10a 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -1228,9 +1228,11 @@ def __init__( if otel_span is not None: self._otel_span = otel_span else: - skip_span = ( - only_if_parent and not get_current_span().get_span_context().is_valid - ) + skip_span = False + if only_if_parent: + parent_span_context = get_current_span().get_span_context() + skip_span = not parent_span_context.is_valid or parent_span_context.is_remote + if skip_span: self._otel_span = INVALID_SPAN else: From e8c181368a5c0c5b128afc8805186f5913a3fd55 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Thu, 14 Nov 2024 18:39:51 +0100 Subject: [PATCH 103/244] Remove sampled setter and fix sanic behavior with an event processor (#3779) --- MIGRATION_GUIDE.md | 1 + sentry_sdk/integrations/sanic.py | 23 ++++++++++++++++------- sentry_sdk/tracing.py | 9 +++------ tests/integrations/sanic/test_sanic.py | 10 ++++++---- 4 files changed, 26 insertions(+), 17 deletions(-) diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index 168de1cfcb..e2f3dd8803 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -11,6 +11,7 @@ Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of wh - The SDK now supports Python 3.7 and higher. - `sentry_sdk.start_span` now only takes keyword arguments. - `sentry_sdk.start_transaction`/`sentry_sdk.start_span` no longer takes the following arguments: `span`, `parent_sampled`, `trace_id`, `span_id` or `parent_span_id`. +- You can no longer change the sampled status of a span with `span.sampled = False` after starting it. - The `Span()` constructor does not accept a `hub` parameter anymore. - `Span.finish()` does not accept a `hub` parameter anymore. - The `Profile()` constructor does not accept a `hub` parameter anymore. 
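A rough sketch of the alternatives (the span name is arbitrary): decide
sampling when the span is started, or drop the finished event from an event
processor, which is the approach the sanic change below takes.

    import sentry_sdk

    # decide at start time instead of mutating span.sampled afterwards
    with sentry_sdk.start_span(name="healthcheck", sampled=False):
        ...

    # or, for an already-running span, drop the resulting event in a processor
    sentry_sdk.get_isolation_scope().add_event_processor(lambda event, hint: None)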
diff --git a/sentry_sdk/integrations/sanic.py b/sentry_sdk/integrations/sanic.py index 7b49f5eaf3..0854c3dbee 100644 --- a/sentry_sdk/integrations/sanic.py +++ b/sentry_sdk/integrations/sanic.py @@ -187,8 +187,11 @@ async def _context_enter(request): return weak_request = weakref.ref(request) - request.ctx._sentry_scope = sentry_sdk.isolation_scope() - scope = request.ctx._sentry_scope.__enter__() + request.ctx._sentry_scope_manager = sentry_sdk.isolation_scope() + scope = request.ctx._sentry_scope_manager.__enter__() + request.ctx._sentry_scope = scope + + scope.set_transaction_name(request.path, TRANSACTION_SOURCE_URL) scope.clear_breadcrumbs() scope.add_event_processor(_make_request_processor(weak_request)) @@ -197,7 +200,7 @@ async def _context_enter(request): dict(request.headers) ) request.ctx._sentry_continue_trace.__enter__() - request.ctx._sentry_transaction = sentry_sdk.start_transaction( + request.ctx._sentry_transaction = sentry_sdk.start_span( op=OP.HTTP_SERVER, # Unless the request results in a 404 error, the name and source will get overwritten in _set_transaction name=request.path, @@ -220,14 +223,20 @@ async def _context_exit(request, response=None): # happens while trying to end the transaction, we still attempt to exit the scope. with capture_internal_exceptions(): request.ctx._sentry_transaction.set_http_status(response_status) - request.ctx._sentry_transaction.sampled &= ( + + if ( isinstance(integration, SanicIntegration) - and response_status not in integration._unsampled_statuses - ) + and response_status in integration._unsampled_statuses + ): + # drop the event in an event processor + request.ctx._sentry_scope.add_event_processor( + lambda _event, _hint: None + ) + request.ctx._sentry_transaction.__exit__(None, None, None) request.ctx._sentry_continue_trace.__exit__(None, None, None) - request.ctx._sentry_scope.__exit__(None, None, None) + request.ctx._sentry_scope_manager.__exit__(None, None, None) async def _set_transaction(request, route, **_): diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 6d24f4d10a..59971e274e 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -1231,7 +1231,9 @@ def __init__( skip_span = False if only_if_parent: parent_span_context = get_current_span().get_span_context() - skip_span = not parent_span_context.is_valid or parent_span_context.is_remote + skip_span = ( + not parent_span_context.is_valid or parent_span_context.is_remote + ) if skip_span: self._otel_span = INVALID_SPAN @@ -1401,11 +1403,6 @@ def sampled(self): # type: () -> Optional[bool] return self._otel_span.get_span_context().trace_flags.sampled - @sampled.setter - def sampled(self, value): - # type: (Optional[bool]) -> None - pass - @property def op(self): # type: () -> Optional[str] diff --git a/tests/integrations/sanic/test_sanic.py b/tests/integrations/sanic/test_sanic.py index 9d95907144..a3fc5a7652 100644 --- a/tests/integrations/sanic/test_sanic.py +++ b/tests/integrations/sanic/test_sanic.py @@ -346,8 +346,9 @@ def __init__( expected_status, expected_transaction_name, expected_source=None, + has_transaction_event=True, ): - # type: (Iterable[Optional[Container[int]]], str, int, Optional[str], Optional[str]) -> None + # type: (Iterable[Optional[Container[int]]], str, int, Optional[str], Optional[str], bool) -> None """ expected_transaction_name of None indicates we expect to not receive a transaction """ @@ -356,6 +357,7 @@ def __init__( self.expected_status = expected_status self.expected_transaction_name = expected_transaction_name 
self.expected_source = expected_source + self.has_transaction_event = has_transaction_event @pytest.mark.skipif( @@ -386,6 +388,7 @@ def __init__( url="/404", expected_status=404, expected_transaction_name=None, + has_transaction_event=False, ), TransactionTestConfig( # With no ignored HTTP statuses, we should get transactions for 404 errors @@ -401,6 +404,7 @@ def __init__( url="/message", expected_status=200, expected_transaction_name=None, + has_transaction_event=False, ), ], ) @@ -430,9 +434,7 @@ def test_transactions(test_config, sentry_init, app, capture_events): (transaction_event, *_) = [*transaction_events, None] # We should have no transaction event if and only if we expect no transactions - assert (transaction_event is None) == ( - test_config.expected_transaction_name is None - ) + assert bool(transaction_event) == test_config.has_transaction_event # If a transaction was expected, ensure it is correct assert ( From 571c5cdc861631d27d0a88b80183d8351804c4ea Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Fri, 15 Nov 2024 10:45:53 +0100 Subject: [PATCH 104/244] Unpack WSGI environ into span attrs accessible in traces sampler (#3775) --- MIGRATION_GUIDE.md | 16 +++++++++++- sentry_sdk/integrations/asgi.py | 1 + sentry_sdk/integrations/wsgi.py | 39 +++++++++++++++++++++++++++- tests/integrations/asgi/test_asgi.py | 1 + tests/integrations/wsgi/test_wsgi.py | 26 ++++++++----------- 5 files changed, 66 insertions(+), 17 deletions(-) diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index e2f3dd8803..da84dc1758 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -21,13 +21,27 @@ Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of wh - clickhouse-driver integration: The query is now available under the `db.query.text` span attribute (only if `send_default_pii` is `True`). - `sentry_sdk.init` now returns `None` instead of a context manager. - The `sampling_context` argument of `traces_sampler` now additionally contains all span attributes known at span start. -- The `sampling_context` argument of `traces_sampler` doesn't contain the `asgi_scope` object anymore for ASGI frameworks. Instead, the individual properties on the scope, if available, are accessible as follows: +- The `sampling_context` argument of `traces_sampler` doesn't contain the `wsgi_environ` object anymore for WSGI frameworks. Instead, the individual properties of the environment are accessible, if available, as follows: + + | Env property | Sampling context key(s) | + | ----------------- | ------------------------------------------------- | + | `PATH_INFO` | `url.path` | + | `QUERY_STRING` | `url.query` | + | `REQUEST_METHOD` | `http.request.method` | + | `SERVER_NAME` | `server.address` | + | `SERVER_PORT` | `server.port` | + | `SERVER_PROTOCOL` | `server.protocol.name`, `server.protocol.version` | + | `wsgi.url_scheme` | `url.scheme` | + | full URL | `url.full` | + +- The `sampling_context` argument of `traces_sampler` doesn't contain the `asgi_scope` object anymore for ASGI frameworks. 
Instead, the individual properties of the scope, if available, are accessible as follows: | Scope property | Sampling context key(s) | | -------------- | ------------------------------- | | `type` | `network.protocol.name` | | `scheme` | `url.scheme` | | `path` | `url.path` | + | `query` | `url.query` | | `http_version` | `network.protocol.version` | | `method` | `http.request.method` | | `server` | `server.address`, `server.port` | diff --git a/sentry_sdk/integrations/asgi.py b/sentry_sdk/integrations/asgi.py index b2ecfe23b7..80c24b8cb6 100644 --- a/sentry_sdk/integrations/asgi.py +++ b/sentry_sdk/integrations/asgi.py @@ -356,6 +356,7 @@ def _prepopulate_attributes(scope): full_url = _get_url(scope) query = _get_query(scope) if query: + attributes["url.query"] = query full_url = f"{full_url}?{query}" attributes["url.full"] = full_url diff --git a/sentry_sdk/integrations/wsgi.py b/sentry_sdk/integrations/wsgi.py index 3aebff17d5..70324a3641 100644 --- a/sentry_sdk/integrations/wsgi.py +++ b/sentry_sdk/integrations/wsgi.py @@ -48,6 +48,15 @@ def __call__(self, status, response_headers, exc_info=None): # type: ignore DEFAULT_TRANSACTION_NAME = "generic WSGI request" +ENVIRON_TO_ATTRIBUTE = { + "PATH_INFO": "url.path", + "QUERY_STRING": "url.query", + "REQUEST_METHOD": "http.request.method", + "SERVER_NAME": "server.address", + "SERVER_PORT": "server.port", + "wsgi.url_scheme": "url.scheme", +} + def wsgi_decoding_dance(s, charset="utf-8", errors="replace"): # type: (str, str, str) -> str @@ -120,7 +129,9 @@ def __call__(self, environ, start_response): name=DEFAULT_TRANSACTION_NAME, source=TRANSACTION_SOURCE_ROUTE, origin=self.span_origin, - custom_sampling_context={"wsgi_environ": environ}, + attributes=_prepopulate_attributes( + environ, self.use_x_forwarded_for + ), ) if should_trace else nullcontext() @@ -309,3 +320,29 @@ def event_processor(event, hint): return event return event_processor + + +def _prepopulate_attributes(wsgi_environ, use_x_forwarded_for=False): + """Extract span attributes from the WSGI environment.""" + attributes = {} + + for property, attr in ENVIRON_TO_ATTRIBUTE.items(): + if wsgi_environ.get(property) is not None: + attributes[attr] = wsgi_environ[property] + + if wsgi_environ.get("SERVER_PROTOCOL") is not None: + try: + proto, version = wsgi_environ["SERVER_PROTOCOL"].split("/") + attributes["network.protocol.name"] = proto + attributes["network.protocol.version"] = version + except Exception: + attributes["network.protocol.name"] = wsgi_environ["SERVER_PROTOCOL"] + + try: + url = get_request_url(wsgi_environ, use_x_forwarded_for) + query = wsgi_environ.get("QUERY_STRING") + attributes["url.full"] = f"{url}?{query}" + except Exception: + pass + + return attributes diff --git a/tests/integrations/asgi/test_asgi.py b/tests/integrations/asgi/test_asgi.py index 74f6d8cc49..adfd798c72 100644 --- a/tests/integrations/asgi/test_asgi.py +++ b/tests/integrations/asgi/test_asgi.py @@ -728,6 +728,7 @@ async def test_asgi_scope_in_traces_sampler(sentry_init, asgi3_app): def dummy_traces_sampler(sampling_context): assert sampling_context["url.path"] == "/test" assert sampling_context["url.scheme"] == "http" + assert sampling_context["url.query"] == "hello=there" assert sampling_context["url.full"] == "/test?hello=there" assert sampling_context["http.request.method"] == "GET" assert sampling_context["network.protocol.version"] == "1.1" diff --git a/tests/integrations/wsgi/test_wsgi.py b/tests/integrations/wsgi/test_wsgi.py index 656fc1757f..0652a775d7 100644 --- 
a/tests/integrations/wsgi/test_wsgi.py +++ b/tests/integrations/wsgi/test_wsgi.py @@ -334,25 +334,21 @@ def app(environ, start_response): start_response("200 OK", []) return ["Go get the ball! Good dog!"] - traces_sampler = mock.Mock(return_value=True) + def traces_sampler(sampling_context): + assert sampling_context["http.request.method"] == "GET" + assert sampling_context["url.path"] == "/dogs/are/great/" + assert sampling_context["url.query"] == "cats=too" + assert sampling_context["url.scheme"] == "http" + assert ( + sampling_context["url.full"] == "http://localhost/dogs/are/great/?cats=too" + ) + return True + sentry_init(send_default_pii=True, traces_sampler=traces_sampler) app = SentryWsgiMiddleware(app) client = Client(app) - client.get("/dogs/are/great/") - - traces_sampler.assert_any_call( - DictionaryContaining( - { - "wsgi_environ": DictionaryContaining( - { - "PATH_INFO": "/dogs/are/great/", - "REQUEST_METHOD": "GET", - }, - ), - } - ) - ) + client.get("/dogs/are/great/?cats=too") def test_session_mode_defaults_to_request_mode_in_wsgi_handler( From 1dc4c28b23917d608223d3b948545d3a6ea57151 Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Fri, 15 Nov 2024 12:48:21 +0100 Subject: [PATCH 105/244] Extract span attrs from AIOHTTP request (#3782) --- MIGRATION_GUIDE.md | 15 +++++- sentry_sdk/integrations/aiohttp.py | 46 ++++++++++++++++--- .../integrations/opentelemetry/sampler.py | 1 - tests/integrations/aiohttp/test_aiohttp.py | 40 ++++++++-------- 4 files changed, 74 insertions(+), 28 deletions(-) diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index da84dc1758..fce361a9ec 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -21,7 +21,18 @@ Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of wh - clickhouse-driver integration: The query is now available under the `db.query.text` span attribute (only if `send_default_pii` is `True`). - `sentry_sdk.init` now returns `None` instead of a context manager. - The `sampling_context` argument of `traces_sampler` now additionally contains all span attributes known at span start. -- The `sampling_context` argument of `traces_sampler` doesn't contain the `wsgi_environ` object anymore for WSGI frameworks. Instead, the individual properties of the environment are accessible, if available, as follows: +- If you're using the AIOHTTP integration, the `sampling_context` argument of `traces_sampler` doesn't contain the `aiohttp_request` object anymore. Instead, some of the individual properties of the request are accessible, if available, as follows: + + | Request property | Sampling context key(s) | + | ---------------- | ------------------------------- | + | `path` | `url.path` | + | `query_string` | `url.query` | + | `method` | `http.request.method` | + | `host` | `server.address`, `server.port` | + | `scheme` | `url.scheme` | + | full URL | `url.full` | + +- If you're using the generic WSGI integration, the `sampling_context` argument of `traces_sampler` doesn't contain the `wsgi_environ` object anymore. Instead, the individual properties of the environment are accessible, if available, as follows: | Env property | Sampling context key(s) | | ----------------- | ------------------------------------------------- | @@ -34,7 +45,7 @@ Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of wh | `wsgi.url_scheme` | `url.scheme` | | full URL | `url.full` | -- The `sampling_context` argument of `traces_sampler` doesn't contain the `asgi_scope` object anymore for ASGI frameworks. 
Instead, the individual properties of the scope, if available, are accessible as follows: +- If you're using the generic ASGI integration, the `sampling_context` argument of `traces_sampler` doesn't contain the `asgi_scope` object anymore. Instead, the individual properties of the scope, if available, are accessible as follows: | Scope property | Sampling context key(s) | | -------------- | ------------------------------- | diff --git a/sentry_sdk/integrations/aiohttp.py b/sentry_sdk/integrations/aiohttp.py index 0928c14c8b..9257eca49a 100644 --- a/sentry_sdk/integrations/aiohttp.py +++ b/sentry_sdk/integrations/aiohttp.py @@ -65,6 +65,13 @@ TRANSACTION_STYLE_VALUES = ("handler_name", "method_and_path_pattern") +REQUEST_PROPERTY_TO_ATTRIBUTE = { + "query_string": "url.query", + "method": "http.request.method", + "scheme": "url.scheme", + "path": "url.path", +} + class AioHttpIntegration(Integration): identifier = "aiohttp" @@ -127,19 +134,19 @@ async def sentry_app_handle(self, request, *args, **kwargs): headers = dict(request.headers) with sentry_sdk.continue_trace(headers): - with sentry_sdk.start_transaction( + with sentry_sdk.start_span( op=OP.HTTP_SERVER, # If this transaction name makes it to the UI, AIOHTTP's # URL resolver did not find a route or died trying. name="generic AIOHTTP request", source=TRANSACTION_SOURCE_ROUTE, origin=AioHttpIntegration.origin, - custom_sampling_context={"aiohttp_request": request}, - ) as transaction: + attributes=_prepopulate_attributes(request), + ) as span: try: response = await old_handle(self, request) except HTTPException as e: - transaction.set_http_status(e.status_code) + span.set_http_status(e.status_code) if ( e.status_code @@ -149,14 +156,14 @@ async def sentry_app_handle(self, request, *args, **kwargs): raise except (asyncio.CancelledError, ConnectionResetError): - transaction.set_status(SPANSTATUS.CANCELLED) + span.set_status(SPANSTATUS.CANCELLED) raise except Exception: # This will probably map to a 500 but seems like we # have no way to tell. Do not set span status. 
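To make the attribute tables above concrete, here is a rough sketch of a `traces_sampler` written against the flattened keys. Which keys are present depends on the integration and on the individual request, and the `/health` path check is only an example, not something these patches prescribe.

```python
import sentry_sdk


def traces_sampler(sampling_context):
    # Individual attributes replace the old `aiohttp_request`,
    # `wsgi_environ` and `asgi_scope` objects in the sampling context.
    method = sampling_context.get("http.request.method", "")
    path = sampling_context.get("url.path", "")

    if method == "GET" and path.startswith("/health"):
        return 0.0  # never trace health checks
    return 0.1  # sample 10% of everything else


sentry_sdk.init(traces_sampler=traces_sampler)
```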
reraise(*_capture_exception()) - transaction.set_http_status(response.status) + span.set_http_status(response.status) return response Application._handle = sentry_app_handle @@ -363,3 +370,30 @@ def get_aiohttp_request_data(request): # request has no body return None + + +def _prepopulate_attributes(request): + # type: (Request) -> dict[str, Any] + """Construct initial span attributes that can be used in traces sampler.""" + attributes = {} + + for prop, attr in REQUEST_PROPERTY_TO_ATTRIBUTE.items(): + if getattr(request, prop, None) is not None: + attributes[attr] = getattr(request, prop) + + if getattr(request, "host", None) is not None: + try: + host, port = request.host.split(":") + attributes["server.address"] = host + attributes["server.port"] = port + except ValueError: + attributes["server.address"] = request.host + + try: + url = f"{request.scheme}://{request.host}{request.path}" + if request.query_string: + attributes["url.full"] = f"{url}?{request.query_string}" + except Exception: + pass + + return attributes diff --git a/sentry_sdk/integrations/opentelemetry/sampler.py b/sentry_sdk/integrations/opentelemetry/sampler.py index 302b66aaaa..0997048532 100644 --- a/sentry_sdk/integrations/opentelemetry/sampler.py +++ b/sentry_sdk/integrations/opentelemetry/sampler.py @@ -153,7 +153,6 @@ def should_sample( } sampling_context.update(attributes) sample_rate = client.options["traces_sampler"](sampling_context) - else: # Check if there is a parent with a sampling decision parent_sampled = get_parent_sampled(parent_span_context, trace_id) diff --git a/tests/integrations/aiohttp/test_aiohttp.py b/tests/integrations/aiohttp/test_aiohttp.py index 432427b08e..8327832acc 100644 --- a/tests/integrations/aiohttp/test_aiohttp.py +++ b/tests/integrations/aiohttp/test_aiohttp.py @@ -1,12 +1,12 @@ import asyncio import json +import re from contextlib import suppress from unittest import mock import pytest from aiohttp import web, ClientSession from aiohttp.client import ServerDisconnectedError -from aiohttp.web_request import Request from aiohttp.web_exceptions import ( HTTPInternalServerError, HTTPNetworkAuthenticationRequired, @@ -291,13 +291,12 @@ async def hello(request): @pytest.mark.asyncio -async def test_traces_sampler_gets_request_object_in_sampling_context( +async def test_traces_sampler_gets_attributes_in_sampling_context( sentry_init, aiohttp_client, - DictionaryContaining, # noqa: N803 - ObjectDescribedBy, # noqa: N803 ): - traces_sampler = mock.Mock() + traces_sampler = mock.Mock(return_value=True) + sentry_init( integrations=[AioHttpIntegration()], traces_sampler=traces_sampler, @@ -310,17 +309,21 @@ async def kangaroo_handler(request): app.router.add_get("/tricks/kangaroo", kangaroo_handler) client = await aiohttp_client(app) - await client.get("/tricks/kangaroo") - - traces_sampler.assert_any_call( - DictionaryContaining( - { - "aiohttp_request": ObjectDescribedBy( - type=Request, attrs={"method": "GET", "path": "/tricks/kangaroo"} - ) - } - ) + await client.get("/tricks/kangaroo?jump=high") + + assert traces_sampler.call_count == 1 + sampling_context = traces_sampler.call_args_list[0][0][0] + assert isinstance(sampling_context, dict) + assert re.match( + r"http:\/\/127\.0\.0\.1:[0-9]{4,5}\/tricks\/kangaroo\?jump=high", + sampling_context["url.full"], ) + assert sampling_context["url.path"] == "/tricks/kangaroo" + assert sampling_context["url.query"] == "jump=high" + assert sampling_context["url.scheme"] == "http" + assert sampling_context["http.request.method"] == "GET" + assert 
sampling_context["server.address"] == "127.0.0.1" + assert sampling_context["server.port"].isnumeric() @pytest.mark.asyncio @@ -574,9 +577,8 @@ async def handler(request): client = await aiohttp_client(raw_server) resp = await client.get("/", headers={"bagGage": "custom=value"}) - assert ( - sorted(resp.request_info.headers["baggage"].split(",")) - == sorted([ + assert sorted(resp.request_info.headers["baggage"].split(",")) == sorted( + [ "custom=value", f"sentry-trace_id={transaction.trace_id}", "sentry-environment=production", @@ -584,7 +586,7 @@ async def handler(request): "sentry-transaction=/interactions/other-dogs/new-dog", "sentry-sample_rate=1.0", "sentry-sampled=true", - ]) + ] ) From 1c147e9b1e99d10e69f7e83d3e69b3035f53b977 Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Fri, 15 Nov 2024 14:12:08 +0100 Subject: [PATCH 106/244] Extract span attrs from Tornado request (#3784) --- MIGRATION_GUIDE.md | 12 ++++++ sentry_sdk/integrations/tornado.py | 46 +++++++++++++++++++++- tests/integrations/tornado/test_tornado.py | 27 +++++++++++++ 3 files changed, 83 insertions(+), 2 deletions(-) diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index fce361a9ec..5d0777c22a 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -32,6 +32,18 @@ Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of wh | `scheme` | `url.scheme` | | full URL | `url.full` | +- If you're using the Tornado integration, the `sampling_context` argument of `traces_sampler` doesn't contain the `tornado_request` object anymore. Instead, some of the individual properties of the request are accessible, if available, as follows: + + | Request property | Sampling context key(s) | + | ---------------- | --------------------------------------------------- | + | `path` | `url.path` | + | `query` | `url.query` | + | `protocol` | `url.scheme` | + | `method` | `http.request.method` | + | `host` | `server.address`, `server.port` | + | `version` | `network.protocol.name`, `network.protocol.version` | + | full URL | `url.full` | + - If you're using the generic WSGI integration, the `sampling_context` argument of `traces_sampler` doesn't contain the `wsgi_environ` object anymore. 
Instead, the individual properties of the environment are accessible, if available, as follows: | Env property | Sampling context key(s) | diff --git a/sentry_sdk/integrations/tornado.py b/sentry_sdk/integrations/tornado.py index 21532fbba5..591f59ec03 100644 --- a/sentry_sdk/integrations/tornado.py +++ b/sentry_sdk/integrations/tornado.py @@ -27,8 +27,9 @@ try: from tornado import version_info as TORNADO_VERSION - from tornado.web import RequestHandler, HTTPError from tornado.gen import coroutine + from tornado.httputil import HTTPServerRequest + from tornado.web import RequestHandler, HTTPError except ImportError: raise DidNotEnable("Tornado not installed") @@ -44,6 +45,14 @@ from sentry_sdk._types import Event, EventProcessor +REQUEST_PROPERTY_TO_ATTRIBUTE = { + "method": "http.request.method", + "path": "url.path", + "query": "url.query", + "protocol": "url.scheme", +} + + class TornadoIntegration(Integration): identifier = "tornado" origin = f"auto.http.{identifier}" @@ -124,7 +133,7 @@ def _handle_request_impl(self): name="generic Tornado request", source=TRANSACTION_SOURCE_ROUTE, origin=TornadoIntegration.origin, - custom_sampling_context={"tornado_request": self.request}, + attributes=_prepopulate_attributes(self.request), ): yield @@ -218,3 +227,36 @@ def files(self): def size_of_file(self, file): # type: (Any) -> int return len(file.body or ()) + + +def _prepopulate_attributes(request): + # type: (HTTPServerRequest) -> dict[str, Any] + # https://www.tornadoweb.org/en/stable/httputil.html#tornado.httputil.HTTPServerRequest + attributes = {} + + for prop, attr in REQUEST_PROPERTY_TO_ATTRIBUTE.items(): + if getattr(request, prop, None) is not None: + attributes[attr] = getattr(request, prop) + + if getattr(request, "version", None): + try: + proto, version = request.version.split("/") + attributes["network.protocol.name"] = proto + attributes["network.protocol.version"] = version + except ValueError: + attributes["network.protocol.name"] = request.version + + if getattr(request, "host", None) is not None: + try: + address, port = request.host.split(":") + attributes["server.address"] = address + attributes["server.port"] = port + except ValueError: + attributes["server.address"] = request.host + + try: + attributes["url.full"] = request.full_url() + except Exception: + pass + + return attributes diff --git a/tests/integrations/tornado/test_tornado.py b/tests/integrations/tornado/test_tornado.py index 294f605f6a..7ad974c535 100644 --- a/tests/integrations/tornado/test_tornado.py +++ b/tests/integrations/tornado/test_tornado.py @@ -1,4 +1,5 @@ import json +import re import pytest @@ -450,3 +451,29 @@ def test_span_origin(tornado_testcase, sentry_init, capture_events): (_, event) = events assert event["contexts"]["trace"]["origin"] == "auto.http.tornado" + + +def test_attributes_in_traces_sampler(tornado_testcase, sentry_init): + def traces_sampler(sampling_context): + assert sampling_context["url.query"] == "foo=bar" + assert sampling_context["url.path"] == "/hi" + assert sampling_context["url.scheme"] == "http" + assert re.match( + r"http:\/\/127\.0\.0\.1:[0-9]{4,5}\/hi\?foo=bar", + sampling_context["url.full"], + ) + assert sampling_context["http.request.method"] == "GET" + assert sampling_context["server.address"] == "127.0.0.1" + assert sampling_context["server.port"].isnumeric() + assert sampling_context["network.protocol.name"] == "HTTP" + assert sampling_context["network.protocol.version"] == "1.1" + + return True + + sentry_init( + integrations=[TornadoIntegration], + 
traces_sampler=traces_sampler, + ) + + client = tornado_testcase(Application([(r"/hi", HelloHandler)])) + client.fetch("/hi?foo=bar") From c29382d15513230df181f5b1e6455bb688143340 Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Fri, 15 Nov 2024 15:32:33 +0100 Subject: [PATCH 107/244] fix some lint things (#3783) --- sentry_sdk/_init_implementation.py | 2 -- sentry_sdk/integrations/aiohttp.py | 2 +- tests/test_api.py | 1 - tests/test_utils.py | 49 ------------------------------ 4 files changed, 1 insertion(+), 53 deletions(-) diff --git a/sentry_sdk/_init_implementation.py b/sentry_sdk/_init_implementation.py index 5be94e1e60..dc235af243 100644 --- a/sentry_sdk/_init_implementation.py +++ b/sentry_sdk/_init_implementation.py @@ -1,5 +1,3 @@ -import warnings - from typing import TYPE_CHECKING import sentry_sdk diff --git a/sentry_sdk/integrations/aiohttp.py b/sentry_sdk/integrations/aiohttp.py index 9257eca49a..ccc4593606 100644 --- a/sentry_sdk/integrations/aiohttp.py +++ b/sentry_sdk/integrations/aiohttp.py @@ -390,7 +390,7 @@ def _prepopulate_attributes(request): attributes["server.address"] = request.host try: - url = f"{request.scheme}://{request.host}{request.path}" + url = f"{request.scheme}://{request.host}{request.path}" # noqa: E231 if request.query_string: attributes["url.full"] = f"{url}?{request.query_string}" except Exception: diff --git a/tests/test_api.py b/tests/test_api.py index 0c79c035b1..46fc24fd24 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1,7 +1,6 @@ import pytest from unittest import mock -import sentry_sdk from sentry_sdk import ( capture_exception, continue_trace, diff --git a/tests/test_utils.py b/tests/test_utils.py index 40894a8e52..5011662f05 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -60,55 +60,6 @@ def _normalize_distribution_name(name): return re.sub(r"[-_.]+", "-", name).lower() -@pytest.mark.parametrize( - ("input_str", "expected_output"), - ( - ( - "2021-01-01T00:00:00.000000Z", - datetime(2021, 1, 1, tzinfo=timezone.utc), - ), # UTC time - ( - "2021-01-01T00:00:00.000000", - datetime(2021, 1, 1).astimezone(timezone.utc), - ), # No TZ -- assume local but convert to UTC - ( - "2021-01-01T00:00:00Z", - datetime(2021, 1, 1, tzinfo=timezone.utc), - ), # UTC - No milliseconds - ( - "2021-01-01T00:00:00.000000+00:00", - datetime(2021, 1, 1, tzinfo=timezone.utc), - ), - ( - "2021-01-01T00:00:00.000000-00:00", - datetime(2021, 1, 1, tzinfo=timezone.utc), - ), - ( - "2021-01-01T00:00:00.000000+0000", - datetime(2021, 1, 1, tzinfo=timezone.utc), - ), - ( - "2021-01-01T00:00:00.000000-0000", - datetime(2021, 1, 1, tzinfo=timezone.utc), - ), - ( - "2020-12-31T00:00:00.000000+02:00", - datetime(2020, 12, 31, tzinfo=timezone(timedelta(hours=2))), - ), # UTC+2 time - ( - "2020-12-31T00:00:00.000000-0200", - datetime(2020, 12, 31, tzinfo=timezone(timedelta(hours=-2))), - ), # UTC-2 time - ( - "2020-12-31T00:00:00-0200", - datetime(2020, 12, 31, tzinfo=timezone(timedelta(hours=-2))), - ), # UTC-2 time - no milliseconds - ), -) -def test_datetime_from_isoformat(input_str, expected_output): - assert datetime_from_isoformat(input_str) == expected_output, input_str - - @pytest.mark.parametrize( "env_var_value,strict,expected", [ From aa9c5ca97efc9968f77602ec02f80d8864566bf8 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Fri, 15 Nov 2024 16:19:48 +0100 Subject: [PATCH 108/244] Fix test_api (#3787) --- tests/conftest.py | 15 +++++++++++++ tests/test_api.py | 54 +++++++++++++++++++++++------------------------ 2 files changed, 42 
insertions(+), 27 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 94fdf55707..cdac88aa2c 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -642,3 +642,18 @@ def __eq__(self, other): def __ne__(self, other): return not self.__eq__(other) + + +@pytest.fixture(name="SortedBaggage") +def sorted_baggage_matcher(): + class SortedBaggage: + def __init__(self, baggage): + self.baggage = baggage + + def __eq__(self, other): + return sorted(self.baggage.split(",")) == sorted(other.split(",")) + + def __ne__(self, other): + return not self.__eq__(other) + + return SortedBaggage diff --git a/tests/test_api.py b/tests/test_api.py index 46fc24fd24..1be69d4a84 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -9,7 +9,7 @@ get_current_span, get_traceparent, is_initialized, - start_transaction, + start_span, set_tags, get_global_scope, get_current_scope, @@ -43,23 +43,23 @@ def test_get_current_span_current_scope(sentry_init): @pytest.mark.forked -def test_get_current_span_current_scope_with_transaction(sentry_init): +def test_get_current_span_current_scope_with_span(sentry_init): sentry_init() assert get_current_span() is None - with start_transaction() as new_transaction: - assert get_current_span() == new_transaction + with start_span() as new_span: + assert get_current_span() == new_span @pytest.mark.forked def test_traceparent_with_tracing_enabled(sentry_init): sentry_init(traces_sample_rate=1.0) - with start_transaction() as transaction: + with start_span() as span: expected_traceparent = "%s-%s-1" % ( - transaction.trace_id, - transaction.span_id, + span.trace_id, + span.span_id, ) assert get_traceparent() == expected_traceparent @@ -77,7 +77,7 @@ def test_traceparent_with_tracing_disabled(sentry_init): @pytest.mark.forked -def test_baggage_with_tracing_disabled(sentry_init): +def test_baggage_with_tracing_disabled(sentry_init, SortedBaggage): sentry_init(release="1.0.0", environment="dev") propagation_context = get_isolation_scope()._propagation_context expected_baggage = ( @@ -85,43 +85,43 @@ def test_baggage_with_tracing_disabled(sentry_init): propagation_context.trace_id ) ) - assert get_baggage() == expected_baggage + assert get_baggage() == SortedBaggage(expected_baggage) @pytest.mark.forked -def test_baggage_with_tracing_enabled(sentry_init): +def test_baggage_with_tracing_enabled(sentry_init, SortedBaggage): sentry_init(traces_sample_rate=1.0, release="1.0.0", environment="dev") - with start_transaction() as transaction: + with start_span() as span: expected_baggage = "sentry-trace_id={},sentry-environment=dev,sentry-release=1.0.0,sentry-sample_rate=1.0,sentry-sampled={}".format( - transaction.trace_id, "true" if transaction.sampled else "false" + span.trace_id, "true" if span.sampled else "false" ) - assert get_baggage() == expected_baggage + assert get_baggage() == SortedBaggage(expected_baggage) @pytest.mark.forked def test_continue_trace(sentry_init): - sentry_init() + sentry_init(traces_sample_rate=1.0) trace_id = "471a43a4192642f0b136d5159a501701" parent_span_id = "6e8f22c393e68f19" parent_sampled = 1 - transaction = continue_trace( + + with continue_trace( { "sentry-trace": "{}-{}-{}".format(trace_id, parent_span_id, parent_sampled), "baggage": "sentry-trace_id=566e3688a61d4bc888951642d6f14a19", }, - name="some name", - ) - with start_transaction(transaction): - assert transaction.name == "some name" - - propagation_context = get_isolation_scope()._propagation_context - assert propagation_context.trace_id == transaction.trace_id == trace_id - 
assert propagation_context.parent_span_id == parent_span_id - assert propagation_context.parent_sampled == parent_sampled - assert propagation_context.dynamic_sampling_context == { - "trace_id": "566e3688a61d4bc888951642d6f14a19" - } + ): + with start_span(name="some name") as span: + assert span.name == "some name" + + propagation_context = get_isolation_scope()._propagation_context + assert propagation_context.trace_id == span.trace_id == trace_id + assert propagation_context.parent_span_id == parent_span_id + assert propagation_context.parent_sampled == parent_sampled + assert propagation_context.dynamic_sampling_context == { + "trace_id": "566e3688a61d4bc888951642d6f14a19" + } @pytest.mark.forked From b2b6315c238ba882cfb8b62e2c78957c540e66bf Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Mon, 18 Nov 2024 12:44:08 +0100 Subject: [PATCH 109/244] Add sample_rate property and fix test_monitor (#3790) --- sentry_sdk/tracing.py | 13 +++++++++++++ tests/test_monitor.py | 12 +++++------- 2 files changed, 18 insertions(+), 7 deletions(-) diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 59971e274e..9c87a45903 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -1403,6 +1403,19 @@ def sampled(self): # type: () -> Optional[bool] return self._otel_span.get_span_context().trace_flags.sampled + @property + def sample_rate(self): + # type: () -> Optional[float] + from sentry_sdk.integrations.opentelemetry.consts import ( + TRACESTATE_SAMPLE_RATE_KEY, + ) + + sample_rate = self._otel_span.get_span_context().trace_state.get( + TRACESTATE_SAMPLE_RATE_KEY + ) + sample_rate = cast("Optional[str]", sample_rate) + return float(sample_rate) if sample_rate is not None else None + @property def op(self): # type: () -> Optional[str] diff --git a/tests/test_monitor.py b/tests/test_monitor.py index 041169d515..1015e9f2ad 100644 --- a/tests/test_monitor.py +++ b/tests/test_monitor.py @@ -55,7 +55,7 @@ def test_monitor_unhealthy(sentry_init): assert monitor.downsample_factor == (i + 1 if i < 10 else 10) -def test_transaction_uses_downsample_rate( +def test_root_span_uses_downsample_rate( sentry_init, capture_envelopes, capture_record_lost_event_calls, monkeypatch ): sentry_init( @@ -78,16 +78,14 @@ def test_transaction_uses_downsample_rate( assert monitor.is_healthy() is False assert monitor.downsample_factor == 1 - with sentry_sdk.start_transaction(name="foobar") as transaction: + with sentry_sdk.start_span(name="foobar") as root_span: with sentry_sdk.start_span(name="foospan"): with sentry_sdk.start_span(name="foospan2"): with sentry_sdk.start_span(name="foospan3"): ... - assert transaction.sampled is False - assert ( - transaction.sample_rate == 0.5 - ) # TODO: this fails until we put the sample_rate in the POTelSpan + assert root_span.sampled is False + assert root_span.sample_rate == 0.5 assert len(envelopes) == 0 @@ -104,7 +102,7 @@ def test_transaction_uses_downsample_rate( "span", None, 1, - ), # Only one span (the transaction itself) is counted, since we did not record any spans in the first place. + ), # Only one span (the root span itself) is counted, since we did not record any spans in the first place. 
] ) From 83442f504f00f4b61aceb052be28b4dcbbe51d58 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Fri, 15 Nov 2024 16:19:28 +0100 Subject: [PATCH 110/244] Fix test_breadcrumbs --- sentry_sdk/tracing.py | 6 ++- tests/test_breadcrumbs.py | 80 +++++++-------------------------------- 2 files changed, 17 insertions(+), 69 deletions(-) diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 9c87a45903..becf7979e2 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -1253,8 +1253,10 @@ def __init__( # Prepopulate some attrs so that they're accessible in traces_sampler attributes = attributes or {} - attributes[SentrySpanAttribute.OP] = op - attributes[SentrySpanAttribute.SOURCE] = source + if op is not None: + attributes[SentrySpanAttribute.OP] = op + if source is not None: + attributes[SentrySpanAttribute.SOURCE] = source if sampled is not None: attributes[SentrySpanAttribute.CUSTOM_SAMPLED] = sampled diff --git a/tests/test_breadcrumbs.py b/tests/test_breadcrumbs.py index 988f536fde..391c24cfc7 100644 --- a/tests/test_breadcrumbs.py +++ b/tests/test_breadcrumbs.py @@ -1,7 +1,6 @@ from unittest import mock import sentry_sdk -from sentry_sdk.consts import OP def test_breadcrumbs(sentry_init, capture_events): @@ -26,7 +25,7 @@ def test_breadcrumbs(sentry_init, capture_events): }, } - with sentry_sdk.start_transaction(name="trx-breadcrumbs"): + with sentry_sdk.start_span(name="trx-breadcrumbs"): sentry_sdk.add_breadcrumb(message="breadcrumb0", **add_breadcrumbs_kwargs) with sentry_sdk.start_span(name="span1", op="function"): @@ -37,26 +36,9 @@ def test_breadcrumbs(sentry_init, capture_events): message="breadcrumb2", **add_breadcrumbs_kwargs ) - # Spans that create breadcrumbs automatically - with sentry_sdk.start_span(name="span3", op=OP.DB_REDIS) as span3: - span3.set_data("span3_data", "data on the redis span") - span3.set_tag("span3_tag", "tag on the redis span") - - with sentry_sdk.start_span(name="span4", op=OP.HTTP_CLIENT) as span4: - span4.set_data("span4_data", "data on the http.client span") - span4.set_tag("span4_tag", "tag on the http.client span") - - with sentry_sdk.start_span(name="span5", op=OP.SUBPROCESS) as span5: - span5.set_data("span5_data", "data on the subprocess span") - span5.set_tag("span5_tag", "tag on the subprocess span") - - with sentry_sdk.start_span(name="span6", op="function") as span6: - # This data on the span is not added to custom breadcrumbs. 
- # Data from the span is only added to automatic breadcrumbs shown above - span6.set_data("span6_data", "data on span6") - span6.set_tag("span6_tag", "tag on the span6") + with sentry_sdk.start_span(name="span3", op="function"): sentry_sdk.add_breadcrumb( - message="breadcrumb6", **add_breadcrumbs_kwargs + message="breadcrumb3", **add_breadcrumbs_kwargs ) try: @@ -64,14 +46,15 @@ def test_breadcrumbs(sentry_init, capture_events): except ZeroDivisionError as ex: sentry_sdk.capture_exception(ex) - (error,) = events + assert len(events) == 2 + error = events[0] breadcrumbs = error["breadcrumbs"]["values"] for crumb in breadcrumbs: print(crumb) - assert len(breadcrumbs) == 7 + assert len(breadcrumbs) == 4 # Check for my custom breadcrumbs for i in range(0, 3): @@ -88,53 +71,16 @@ def test_breadcrumbs(sentry_init, capture_events): } assert breadcrumbs[i]["timestamp"] == mock.ANY - # Check automatic redis breadcrumbs - assert breadcrumbs[3]["message"] == "span3" - assert breadcrumbs[3]["type"] == "redis" - assert breadcrumbs[3]["category"] == "redis" - assert "level" not in breadcrumbs[3] - assert "origin" not in breadcrumbs[3] + # Check for custom breadcrumbs on span3 + assert breadcrumbs[3]["message"] == "breadcrumb3" + assert breadcrumbs[3]["type"] == "navigation" + assert breadcrumbs[3]["category"] == "unit_tests.breadcrumbs" + assert breadcrumbs[3]["level"] == "fatal" + assert breadcrumbs[3]["origin"] == "unit-tests" assert breadcrumbs[3]["data"] == { - "span3_tag": "tag on the redis span", - } - assert breadcrumbs[3]["timestamp"] == mock.ANY - - # Check automatic http.client breadcrumbs - assert "message" not in breadcrumbs[4] - assert breadcrumbs[4]["type"] == "http" - assert breadcrumbs[4]["category"] == "httplib" - assert "level" not in breadcrumbs[4] - assert "origin" not in breadcrumbs[4] - assert breadcrumbs[4]["data"] == { - "thread.id": mock.ANY, - "thread.name": mock.ANY, - "span4_data": "data on the http.client span", - } - assert breadcrumbs[4]["timestamp"] == mock.ANY - - # Check automatic subprocess breadcrumbs - assert breadcrumbs[5]["message"] == "span5" - assert breadcrumbs[5]["type"] == "subprocess" - assert breadcrumbs[5]["category"] == "subprocess" - assert "level" not in breadcrumbs[5] - assert "origin" not in breadcrumbs[5] - assert breadcrumbs[5]["data"] == { - "thread.id": mock.ANY, - "thread.name": mock.ANY, - "span5_data": "data on the subprocess span", - } - assert breadcrumbs[5]["timestamp"] == mock.ANY - - # Check for custom breadcrumbs on span6 - assert breadcrumbs[6]["message"] == "breadcrumb6" - assert breadcrumbs[6]["type"] == "navigation" - assert breadcrumbs[6]["category"] == "unit_tests.breadcrumbs" - assert breadcrumbs[6]["level"] == "fatal" - assert breadcrumbs[6]["origin"] == "unit-tests" - assert breadcrumbs[6]["data"] == { "string": "foobar", "number": 4.2, "array": [1, 2, 3], "dict": {"foo": "bar"}, } - assert breadcrumbs[6]["timestamp"] == mock.ANY + assert breadcrumbs[3]["timestamp"] == mock.ANY From 238492d939f47f2cdc14bae1bb75386992aa9ca5 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Mon, 18 Nov 2024 12:43:39 +0100 Subject: [PATCH 111/244] Fix test_dsc and twp baggage handling (#3789) --- .../integrations/opentelemetry/utils.py | 4 +- sentry_sdk/scope.py | 9 ++--- tests/test_dsc.py | 38 +++++++++---------- 3 files changed, 24 insertions(+), 27 deletions(-) diff --git a/sentry_sdk/integrations/opentelemetry/utils.py b/sentry_sdk/integrations/opentelemetry/utils.py index 07e60ddd0f..6127ceba5c 100644 --- 
a/sentry_sdk/integrations/opentelemetry/utils.py +++ b/sentry_sdk/integrations/opentelemetry/utils.py @@ -3,7 +3,7 @@ from datetime import datetime, timezone from urllib3.util import parse_url as urlparse -from urllib.parse import quote +from urllib.parse import quote, unquote from opentelemetry.trace import ( Span as AbstractSpan, SpanKind, @@ -354,7 +354,7 @@ def dsc_from_trace_state(trace_state): for k, v in trace_state.items(): if Baggage.SENTRY_PREFIX_REGEX.match(k): key = re.sub(Baggage.SENTRY_PREFIX_REGEX, "", k) - dsc[key] = v + dsc[unquote(key)] = unquote(v) return dsc diff --git a/sentry_sdk/scope.py b/sentry_sdk/scope.py index 48b8571b98..fbe258fb8a 100644 --- a/sentry_sdk/scope.py +++ b/sentry_sdk/scope.py @@ -480,13 +480,10 @@ def generate_propagation_context(self, incoming_data=None): def get_dynamic_sampling_context(self): # type: () -> Optional[Dict[str, str]] """ - Returns the Dynamic Sampling Context from the Propagation Context. + Returns the Dynamic Sampling Context from the baggage or populates one. """ - return ( - self._propagation_context.dynamic_sampling_context - if self._propagation_context - else None - ) + baggage = self.get_baggage() + return baggage.dynamic_sampling_context() if baggage else None def get_traceparent(self, *args, **kwargs): # type: (Any, Any) -> Optional[str] diff --git a/tests/test_dsc.py b/tests/test_dsc.py index 3b8cff5baf..e1ceb80a05 100644 --- a/tests/test_dsc.py +++ b/tests/test_dsc.py @@ -27,8 +27,8 @@ def test_dsc_head_of_trace(sentry_init, capture_envelopes): ) envelopes = capture_envelopes() - # We start a new transaction - with sentry_sdk.start_transaction(name="foo"): + # We start a new root_span + with sentry_sdk.start_span(name="foo"): pass assert len(envelopes) == 1 @@ -95,10 +95,10 @@ def test_dsc_continuation_of_trace(sentry_init, capture_envelopes): "HTTP_BAGGAGE": baggage, } - # We continue the incoming trace and start a new transaction - transaction = sentry_sdk.continue_trace(incoming_http_headers) - with sentry_sdk.start_transaction(transaction, name="foo"): - pass + # We continue the incoming trace and start a new root span + with sentry_sdk.continue_trace(incoming_http_headers): + with sentry_sdk.start_span(name="foo"): + pass assert len(envelopes) == 1 @@ -145,7 +145,7 @@ def test_dsc_issue(sentry_init, capture_envelopes): ) envelopes = capture_envelopes() - # No transaction is started, just an error is captured + # No root span is started, just an error is captured try: 1 / 0 except ZeroDivisionError as exp: @@ -181,8 +181,8 @@ def test_dsc_issue(sentry_init, capture_envelopes): def test_dsc_issue_with_tracing(sentry_init, capture_envelopes): """ - Our service has tracing enabled and an error occurs in an transaction. - Envelopes containing errors also have the same DSC than the transaction envelopes. + Our service has tracing enabled and an error occurs in an root span. + Envelopes containing errors also have the same DSC than the root span envelopes. 
""" sentry_init( dsn="https://mysecret@bla.ingest.sentry.io/12312012", @@ -192,8 +192,8 @@ def test_dsc_issue_with_tracing(sentry_init, capture_envelopes): ) envelopes = capture_envelopes() - # We start a new transaction and an error occurs - with sentry_sdk.start_transaction(name="foo"): + # We start a new root span and an error occurs + with sentry_sdk.start_span(name="foo"): try: 1 / 0 except ZeroDivisionError as exp: @@ -239,7 +239,7 @@ def test_dsc_issue_with_tracing(sentry_init, capture_envelopes): "traces_sample_rate", [ 0, # no traces will be started, but if incoming traces will be continued (by our instrumentations, not happening in this test) - None, # no tracing at all. This service will never create transactions. + None, # no tracing at all. This service will never create root spans. ], ) def test_dsc_issue_twp(sentry_init, capture_envelopes, traces_sample_rate): @@ -278,14 +278,14 @@ def test_dsc_issue_twp(sentry_init, capture_envelopes, traces_sample_rate): } # We continue the trace (meaning: saving the incoming trace information on the scope) - # but in this test, we do not start a transaction. - sentry_sdk.continue_trace(incoming_http_headers) + # but in this test, we do not start a root span. + with sentry_sdk.continue_trace(incoming_http_headers): - # No transaction is started, just an error is captured - try: - 1 / 0 - except ZeroDivisionError as exp: - sentry_sdk.capture_exception(exp) + # No root span is started, just an error is captured + try: + 1 / 0 + except ZeroDivisionError as exp: + sentry_sdk.capture_exception(exp) assert len(envelopes) == 1 From 6afa91c523984e410437fd5e359057fdc2bb445d Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Mon, 18 Nov 2024 18:13:07 +0100 Subject: [PATCH 112/244] Fix tracing tests (#3799) --- .../opentelemetry/potel_span_processor.py | 5 +- .../integrations/opentelemetry/sampler.py | 4 +- sentry_sdk/scope.py | 8 - sentry_sdk/tracing.py | 7 +- tests/tracing/test_integration_tests.py | 179 ++++------ tests/tracing/test_misc.py | 305 ++---------------- tests/tracing/test_propagation.py | 40 --- tests/tracing/test_sampling.py | 73 ++--- tests/tracing/test_span_name.py | 35 +- tests/tracing/test_span_origin.py | 8 +- 10 files changed, 133 insertions(+), 531 deletions(-) delete mode 100644 tests/tracing/test_propagation.py diff --git a/sentry_sdk/integrations/opentelemetry/potel_span_processor.py b/sentry_sdk/integrations/opentelemetry/potel_span_processor.py index 1d27642d1e..1736fcd25e 100644 --- a/sentry_sdk/integrations/opentelemetry/potel_span_processor.py +++ b/sentry_sdk/integrations/opentelemetry/potel_span_processor.py @@ -179,7 +179,7 @@ def _root_span_to_transaction_event(self, span): transaction_name, transaction_source = extract_transaction_name_source(span) span_data = extract_span_data(span) - (_, description, _, http_status, _) = span_data + (_, description, status, http_status, _) = span_data trace_context = get_trace_context(span, span_data=span_data) contexts = {"trace": trace_context} @@ -241,6 +241,9 @@ def _span_to_json(self, span): } ) + if status: + span_json.setdefault("tags", {})["status"] = status + if parent_span_id: span_json["parent_span_id"] = parent_span_id diff --git a/sentry_sdk/integrations/opentelemetry/sampler.py b/sentry_sdk/integrations/opentelemetry/sampler.py index 0997048532..5fa41d28fc 100644 --- a/sentry_sdk/integrations/opentelemetry/sampler.py +++ b/sentry_sdk/integrations/opentelemetry/sampler.py @@ -34,8 +34,8 @@ def get_parent_sampled(parent_context, trace_id): # Only inherit sample rate if 
`traceId` is the same if is_span_context_valid and parent_context.trace_id == trace_id: # this is getSamplingDecision in JS - if parent_context.trace_flags.sampled: - return True + if parent_context.trace_flags.sampled is not None: + return parent_context.trace_flags.sampled dsc_sampled = parent_context.trace_state.get(TRACESTATE_SAMPLED_KEY) if dsc_sampled == "true": diff --git a/sentry_sdk/scope.py b/sentry_sdk/scope.py index fbe258fb8a..12b6c5aed6 100644 --- a/sentry_sdk/scope.py +++ b/sentry_sdk/scope.py @@ -786,14 +786,6 @@ def span(self, span): # type: (Optional[Span]) -> None """Set current tracing span.""" self._span = span - # XXX: this differs from the implementation in JS, there Scope.setSpan - # does not set Scope._transactionName. - if isinstance(span, Transaction): - transaction = span - if transaction.name: - self._transaction = transaction.name - if transaction.source: - self._transaction_info["source"] = transaction.source @property def profile(self): diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index becf7979e2..18b18ba8ef 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -163,6 +163,7 @@ class TransactionKwargs(SpanKwargs, total=False): } DEFAULT_SPAN_ORIGIN = "manual" +DEFAULT_SPAN_NAME = "" tracer = otel_trace.get_tracer(__name__) @@ -1249,7 +1250,7 @@ def __init__( # OTel timestamps have nanosecond precision start_timestamp = convert_to_otel_timestamp(start_timestamp) - span_name = name or description or op or "" + span_name = name or description or op or DEFAULT_SPAN_NAME # Prepopulate some attrs so that they're accessible in traces_sampler attributes = attributes or {} @@ -1398,7 +1399,9 @@ def span_id(self): @property def is_valid(self): # type: () -> bool - return self._otel_span.get_span_context().is_valid + return self._otel_span.get_span_context().is_valid and isinstance( + self._otel_span, ReadableSpan + ) @property def sampled(self): diff --git a/tests/tracing/test_integration_tests.py b/tests/tracing/test_integration_tests.py index e27dbea901..3a4bef77fb 100644 --- a/tests/tracing/test_integration_tests.py +++ b/tests/tracing/test_integration_tests.py @@ -1,5 +1,3 @@ -import weakref -import gc import re import pytest import random @@ -7,12 +5,11 @@ import sentry_sdk from sentry_sdk import ( capture_message, + continue_trace, start_span, - start_transaction, ) from sentry_sdk.consts import SPANSTATUS from sentry_sdk.transport import Transport -from sentry_sdk.tracing import Transaction @pytest.mark.parametrize("sample_rate", [0.0, 1.0]) @@ -20,8 +17,8 @@ def test_basic(sentry_init, capture_events, sample_rate): sentry_init(traces_sample_rate=sample_rate) events = capture_events() - with start_transaction(name="hi") as transaction: - transaction.set_status(SPANSTATUS.OK) + with start_span(name="hi") as root_span: + root_span.set_status(SPANSTATUS.OK) with pytest.raises(ZeroDivisionError): with start_span(op="foo", name="foodesc"): 1 / 0 @@ -39,21 +36,23 @@ def test_basic(sentry_init, capture_events, sample_rate): span1, span2 = event["spans"] parent_span = event assert span1["tags"]["status"] == "internal_error" + assert span1["status"] == "internal_error" assert span1["op"] == "foo" assert span1["description"] == "foodesc" assert "status" not in span2.get("tags", {}) assert span2["op"] == "bar" assert span2["description"] == "bardesc" assert parent_span["transaction"] == "hi" - assert "status" not in event["tags"] + assert "status" not in event.get("tags", {}) assert event["contexts"]["trace"]["status"] == "ok" else: assert not 
events -@pytest.mark.parametrize("sampled", [True, False, None]) @pytest.mark.parametrize("sample_rate", [0.0, 1.0]) -def test_continue_from_headers(sentry_init, capture_envelopes, sampled, sample_rate): +def test_continue_trace( + sentry_init, capture_envelopes, sample_rate, SortedBaggage +): # noqa:N803 """ Ensure data is actually passed along via headers, and that they are read correctly. @@ -62,55 +61,41 @@ def test_continue_from_headers(sentry_init, capture_envelopes, sampled, sample_r envelopes = capture_envelopes() # make a parent transaction (normally this would be in a different service) - with start_transaction(name="hi", sampled=True if sample_rate == 0 else None): - with start_span() as old_span: - old_span.sampled = sampled - headers = dict( - sentry_sdk.get_current_scope().iter_trace_propagation_headers(old_span) - ) - headers["baggage"] = ( - "other-vendor-value-1=foo;bar;baz, " - "sentry-trace_id=771a43a4192642f0b136d5159a501700, " - "sentry-public_key=49d0f7386ad645858ae85020e393bef3, " - "sentry-sample_rate=0.01337, sentry-user_id=Amelie, " - "other-vendor-value-2=foo;bar;" - ) + with start_span(name="hi"): + with start_span(name="inner") as old_span: + headers = dict(old_span.iter_headers()) + assert headers["sentry-trace"] + assert headers["baggage"] # child transaction, to prove that we can read 'sentry-trace' header data correctly - child_transaction = Transaction.continue_from_headers(headers, name="WRONG") - assert child_transaction is not None - assert child_transaction.parent_sampled == sampled - assert child_transaction.trace_id == old_span.trace_id - assert child_transaction.same_process_as_parent is False - assert child_transaction.parent_span_id == old_span.span_id - assert child_transaction.span_id != old_span.span_id - - baggage = child_transaction._baggage - assert baggage - assert not baggage.mutable - assert baggage.sentry_items == { - "public_key": "49d0f7386ad645858ae85020e393bef3", - "trace_id": "771a43a4192642f0b136d5159a501700", - "user_id": "Amelie", - "sample_rate": "0.01337", - } - - # add child transaction to the scope, to show that the captured message will - # be tagged with the trace id (since it happens while the transaction is - # open) - with start_transaction(child_transaction): - # change the transaction name from "WRONG" to make sure the change - # is reflected in the final data - sentry_sdk.get_current_scope().transaction = "ho" - capture_message("hello") + with continue_trace(headers): + with start_span(name="WRONG") as child_root_span: + assert child_root_span is not None + assert child_root_span.sampled == (sample_rate == 1.0) + if child_root_span.sampled: + assert child_root_span.parent_span_id == old_span.span_id + assert child_root_span.trace_id == old_span.trace_id + assert child_root_span.span_id != old_span.span_id + + baggage = child_root_span.get_baggage() + assert baggage.serialize() == SortedBaggage(headers["baggage"]) + + # change the transaction name from "WRONG" to make sure the change + # is reflected in the final data + sentry_sdk.get_current_scope().set_transaction_name("ho") + # to show that the captured message will be tagged with the trace id + # (since it happens while the transaction is open) + capture_message("hello") # in this case the child transaction won't be captured - if sampled is False or (sample_rate == 0 and sampled is None): - trace1, message = envelopes + # but message follows twp spec + if sample_rate == 0.0: + (message,) = envelopes message_payload = message.get_event() - trace1_payload = 
trace1.get_transaction_event() - - assert trace1_payload["transaction"] == "hi" + assert message_payload["transaction"] == "ho" + assert ( + child_root_span.trace_id == message_payload["contexts"]["trace"]["trace_id"] + ) else: trace1, message, trace2 = envelopes trace1_payload = trace1.get_transaction_event() @@ -123,24 +108,22 @@ def test_continue_from_headers(sentry_init, capture_envelopes, sampled, sample_r assert ( trace1_payload["contexts"]["trace"]["trace_id"] == trace2_payload["contexts"]["trace"]["trace_id"] - == child_transaction.trace_id + == child_root_span.trace_id == message_payload["contexts"]["trace"]["trace_id"] ) assert trace2.headers["trace"] == baggage.dynamic_sampling_context() - assert trace2.headers["trace"] == { - "public_key": "49d0f7386ad645858ae85020e393bef3", - "trace_id": "771a43a4192642f0b136d5159a501700", - "user_id": "Amelie", - "sample_rate": "0.01337", - } assert message_payload["message"] == "hello" @pytest.mark.parametrize("sample_rate", [0.5, 1.0]) def test_dynamic_sampling_head_sdk_creates_dsc( - sentry_init, capture_envelopes, sample_rate, monkeypatch + sentry_init, + capture_envelopes, + sample_rate, + monkeypatch, + SortedBaggage, # noqa: N803 ): sentry_init(traces_sample_rate=sample_rate, release="foo") envelopes = capture_envelopes() @@ -148,31 +131,20 @@ def test_dynamic_sampling_head_sdk_creates_dsc( # make sure transaction is sampled for both cases monkeypatch.setattr(random, "random", lambda: 0.1) - transaction = Transaction.continue_from_headers({}, name="Head SDK tx") - - # will create empty mutable baggage - baggage = transaction._baggage - assert baggage - assert baggage.mutable - assert baggage.sentry_items == {} - assert baggage.third_party_items == "" - - with start_transaction(transaction): - with start_span(op="foo", name="foodesc"): - pass + with continue_trace({}): + with start_span(name="Head SDK tx"): + with start_span(op="foo", name="foodesc") as span: + baggage = span.get_baggage() - # finish will create a new baggage entry - baggage = transaction._baggage - trace_id = transaction.trace_id + trace_id = span.trace_id assert baggage - assert not baggage.mutable assert baggage.third_party_items == "" assert baggage.sentry_items == { "environment": "production", "release": "foo", "sample_rate": str(sample_rate), - "sampled": "true" if transaction.sampled else "false", + "sampled": "true" if span.sampled else "false", "transaction": "Head SDK tx", "trace_id": trace_id, } @@ -184,9 +156,9 @@ def test_dynamic_sampling_head_sdk_creates_dsc( "sentry-transaction=Head%%20SDK%%20tx," "sentry-sample_rate=%s," "sentry-sampled=%s" - % (trace_id, sample_rate, "true" if transaction.sampled else "false") + % (trace_id, sample_rate, "true" if span.sampled else "false") ) - assert baggage.serialize() == expected_baggage + assert baggage.serialize() == SortedBaggage(expected_baggage) (envelope,) = envelopes assert envelope.headers["trace"] == baggage.dynamic_sampling_context() @@ -194,41 +166,12 @@ def test_dynamic_sampling_head_sdk_creates_dsc( "environment": "production", "release": "foo", "sample_rate": str(sample_rate), - "sampled": "true" if transaction.sampled else "false", + "sampled": "true" if span.sampled else "false", "transaction": "Head SDK tx", "trace_id": trace_id, } -@pytest.mark.parametrize( - "args,expected_refcount", - [({"traces_sample_rate": 1.0}, 100), ({"traces_sample_rate": 0.0}, 0)], -) -def test_memory_usage(sentry_init, capture_events, args, expected_refcount): - sentry_init(**args) - - references = weakref.WeakSet() - - 
with start_transaction(name="hi"): - for i in range(100): - with start_span(op="helloworld", name="hi {}".format(i)) as span: - - def foo(): - pass - - references.add(foo) - span.set_tag("foo", foo) - pass - - del foo - del span - - # required only for pypy (cpython frees immediately) - gc.collect() - - assert len(references) == expected_refcount - - def test_transactions_do_not_go_through_before_send(sentry_init, capture_events): def before_send(event, hint): raise RuntimeError("should not be called") @@ -236,7 +179,7 @@ def before_send(event, hint): sentry_init(traces_sample_rate=1.0, before_send=before_send) events = capture_events() - with start_transaction(name="/"): + with start_span(name="/"): pass assert len(events) == 1 @@ -254,7 +197,7 @@ def capture_event(self, event): sentry_init(traces_sample_rate=1, transport=CustomTransport()) events = capture_events() - with start_transaction(name="hi"): + with start_span(name="hi"): with start_span(op="bar", name="bardesc"): pass @@ -264,14 +207,14 @@ def capture_event(self, event): def test_trace_propagation_meta_head_sdk(sentry_init): sentry_init(traces_sample_rate=1.0, release="foo") - transaction = Transaction.continue_from_headers({}, name="Head SDK tx") meta = None span = None - with start_transaction(transaction): - with start_span(op="foo", name="foodesc") as current_span: - span = current_span - meta = sentry_sdk.get_current_scope().trace_propagation_meta() + with continue_trace({}): + with start_span(name="Head SDK tx") as root_span: + with start_span(op="foo", name="foodesc") as current_span: + span = current_span + meta = sentry_sdk.get_current_scope().trace_propagation_meta() ind = meta.find(">") + 1 sentry_trace, baggage = meta[:ind], meta[ind:] @@ -282,4 +225,4 @@ def test_trace_propagation_meta_head_sdk(sentry_init): assert 'meta name="baggage"' in baggage baggage_content = re.findall('content="([^"]*)"', baggage)[0] - assert baggage_content == transaction.get_baggage().serialize() + assert baggage_content == root_span.get_baggage().serialize() diff --git a/tests/tracing/test_misc.py b/tests/tracing/test_misc.py index 46ac8ad10f..0d12acc617 100644 --- a/tests/tracing/test_misc.py +++ b/tests/tracing/test_misc.py @@ -1,67 +1,43 @@ import pytest -import gc -import uuid -import os -from unittest import mock from unittest.mock import MagicMock import sentry_sdk -from sentry_sdk import start_span, start_transaction, set_measurement +from sentry_sdk import start_span, set_measurement, get_current_scope from sentry_sdk.consts import MATCH_ALL -from sentry_sdk.tracing import Span, Transaction from sentry_sdk.tracing_utils import should_propagate_trace from sentry_sdk.utils import Dsn -def test_span_trimming(sentry_init, capture_events): - sentry_init(traces_sample_rate=1.0, _experiments={"max_spans": 3}) - events = capture_events() - - with start_transaction(name="hi"): - for i in range(10): - with start_span(op="foo{}".format(i)): - pass - - (event,) = events - - assert len(event["spans"]) == 3 - - span1, span2, span3 = event["spans"] - assert span1["op"] == "foo0" - assert span2["op"] == "foo1" - assert span3["op"] == "foo2" - - def test_transaction_naming(sentry_init, capture_events): sentry_init(traces_sample_rate=1.0) events = capture_events() # default name in event if no name is passed - with start_transaction() as transaction: + with start_span(): pass assert len(events) == 1 - assert events[0]["transaction"] == "" + assert events[0]["transaction"] == "" # the name can be set once the transaction's already started - with 
start_transaction() as transaction: - transaction.name = "name-known-after-transaction-started" + with start_span() as span: + span.name = "name-known-after-transaction-started" assert len(events) == 2 assert events[1]["transaction"] == "name-known-after-transaction-started" # passing in a name works, too - with start_transaction(name="a"): + with start_span(name="a"): pass assert len(events) == 3 assert events[2]["transaction"] == "a" -def test_transaction_data(sentry_init, capture_events): +def test_root_span_data(sentry_init, capture_events): sentry_init(traces_sample_rate=1.0) events = capture_events() - with start_transaction(name="test-transaction"): - span_or_tx = sentry_sdk.get_current_span() - span_or_tx.set_data("foo", "bar") + with start_span(name="test-root-span"): + root_span = sentry_sdk.get_current_span() + root_span.set_data("foo", "bar") with start_span(op="test-span") as span: span.set_data("spanfoo", "spanbar") @@ -82,175 +58,15 @@ def test_transaction_data(sentry_init, capture_events): assert span_data.items() >= {"spanfoo": "spanbar"}.items() -def test_start_transaction(sentry_init): - sentry_init(traces_sample_rate=1.0) - - # you can have it start a transaction for you - result1 = start_transaction( - name="/interactions/other-dogs/new-dog", op="greeting.sniff" - ) - assert isinstance(result1, Transaction) - assert result1.name == "/interactions/other-dogs/new-dog" - assert result1.op == "greeting.sniff" - - # or you can pass it an already-created transaction - preexisting_transaction = Transaction( - name="/interactions/other-dogs/new-dog", op="greeting.sniff" - ) - result2 = start_transaction(preexisting_transaction) - assert result2 is preexisting_transaction - - -def test_finds_transaction_on_scope(sentry_init): - sentry_init(traces_sample_rate=1.0) - - transaction = start_transaction(name="dogpark") - - scope = sentry_sdk.get_current_scope() - - # See note in Scope class re: getters and setters of the `transaction` - # property. For the moment, assigning to scope.transaction merely sets the - # transaction name, rather than putting the transaction on the scope, so we - # have to assign to _span directly. - scope._span = transaction - - # Reading scope.property, however, does what you'd expect, and returns the - # transaction on the scope. - assert scope.transaction is not None - assert isinstance(scope.transaction, Transaction) - assert scope.transaction.name == "dogpark" - - # If the transaction is also set as the span on the scope, it can be found - # by accessing _span, too. 
- assert scope._span is not None - assert isinstance(scope._span, Transaction) - assert scope._span.name == "dogpark" - - -def test_finds_transaction_when_descendent_span_is_on_scope( - sentry_init, -): - sentry_init(traces_sample_rate=1.0) - - transaction = start_transaction(name="dogpark") - child_span = transaction.start_child(op="sniffing") - - scope = sentry_sdk.get_current_scope() - scope._span = child_span - - # this is the same whether it's the transaction itself or one of its - # decedents directly attached to the scope - assert scope.transaction is not None - assert isinstance(scope.transaction, Transaction) - assert scope.transaction.name == "dogpark" - - # here we see that it is in fact the span on the scope, rather than the - # transaction itself - assert scope._span is not None - assert isinstance(scope._span, Span) - assert scope._span.op == "sniffing" - - -def test_finds_orphan_span_on_scope(sentry_init): - # this is deprecated behavior which may be removed at some point (along with - # the start_span function) - sentry_init(traces_sample_rate=1.0) - - span = start_span(op="sniffing") - - scope = sentry_sdk.get_current_scope() - scope._span = span - - assert scope._span is not None - assert isinstance(scope._span, Span) - assert scope._span.op == "sniffing" - - -def test_finds_non_orphan_span_on_scope(sentry_init): - sentry_init(traces_sample_rate=1.0) - - transaction = start_transaction(name="dogpark") - child_span = transaction.start_child(op="sniffing") - - scope = sentry_sdk.get_current_scope() - scope._span = child_span - - assert scope._span is not None - assert isinstance(scope._span, Span) - assert scope._span.op == "sniffing" - - -def test_circular_references(monkeypatch, sentry_init, request): - # TODO: We discovered while writing this test about transaction/span - # reference cycles that there's actually also a circular reference in - # `serializer.py`, between the functions `_serialize_node` and - # `_serialize_node_impl`, both of which are defined inside of the main - # `serialize` function, and each of which calls the other one. For now, in - # order to avoid having those ref cycles give us a false positive here, we - # can mock out `serialize`. In the long run, though, we should probably fix - # that. (Whenever we do work on fixing it, it may be useful to add - # - # gc.set_debug(gc.DEBUG_LEAK) - # request.addfinalizer(lambda: gc.set_debug(~gc.DEBUG_LEAK)) - # - # immediately after the initial collection below, so we can see what new - # objects the garbage collector has to clean up once `transaction.finish` is - # called and the serializer runs.) - monkeypatch.setattr( - sentry_sdk.client, - "serialize", - mock.Mock( - return_value=None, - ), - ) - - # In certain versions of python, in some environments (specifically, python - # 3.4 when run in GH Actions), we run into a `ctypes` bug which creates - # circular references when `uuid4()` is called, as happens when we're - # generating event ids. Mocking it with an implementation which doesn't use - # the `ctypes` function lets us avoid having false positives when garbage - # collecting. See https://bugs.python.org/issue20519. 
- monkeypatch.setattr( - uuid, - "uuid4", - mock.Mock( - return_value=uuid.UUID(bytes=os.urandom(16)), - ), - ) - - gc.disable() - request.addfinalizer(gc.enable) - +def test_finds_spans_on_scope(sentry_init): sentry_init(traces_sample_rate=1.0) - # Make sure that we're starting with a clean slate before we start creating - # transaction/span reference cycles - gc.collect() - - dogpark_transaction = start_transaction(name="dogpark") - sniffing_span = dogpark_transaction.start_child(op="sniffing") - wagging_span = dogpark_transaction.start_child(op="wagging") - - # At some point, you have to stop sniffing - there are balls to chase! - so finish - # this span while the dogpark transaction is still open - sniffing_span.finish() + with start_span(name="dogpark") as root_span: + assert get_current_scope().span == root_span - # The wagging, however, continues long past the dogpark, so that span will - # NOT finish before the transaction ends. (Doing it in this order proves - # that both finished and unfinished spans get their cycles broken.) - dogpark_transaction.finish() - - # Eventually you gotta sleep... - wagging_span.finish() - - # assuming there are no cycles by this point, these should all be able to go - # out of scope and get their memory deallocated without the garbage - # collector having anything to do - del sniffing_span - del wagging_span - del dogpark_transaction - - assert gc.collect() == 0 + with start_span(name="child") as child_span: + assert get_current_scope().span == child_span + assert child_span.root_span == root_span def test_set_measurement(sentry_init, capture_events): @@ -258,21 +74,19 @@ def test_set_measurement(sentry_init, capture_events): events = capture_events() - transaction = start_transaction(name="measuring stuff") + with start_span(name="measuring stuff") as span: - with pytest.raises(TypeError): - transaction.set_measurement() + with pytest.raises(TypeError): + span.set_measurement() - with pytest.raises(TypeError): - transaction.set_measurement("metric.foo") + with pytest.raises(TypeError): + span.set_measurement("metric.foo") - transaction.set_measurement("metric.foo", 123) - transaction.set_measurement("metric.bar", 456, unit="second") - transaction.set_measurement("metric.baz", 420.69, unit="custom") - transaction.set_measurement("metric.foobar", 12, unit="percent") - transaction.set_measurement("metric.foobar", 17.99, unit="percent") - - transaction.finish() + span.set_measurement("metric.foo", 123) + span.set_measurement("metric.bar", 456, unit="second") + span.set_measurement("metric.baz", 420.69, unit="custom") + span.set_measurement("metric.foobar", 12, unit="percent") + span.set_measurement("metric.foobar", 17.99, unit="percent") (event,) = events assert event["measurements"]["metric.foo"] == {"value": 123, "unit": ""} @@ -286,7 +100,7 @@ def test_set_measurement_public_api(sentry_init, capture_events): events = capture_events() - with start_transaction(name="measuring stuff"): + with start_span(name="measuring stuff"): set_measurement("metric.foo", 123) set_measurement("metric.bar", 456, unit="second") @@ -374,68 +188,3 @@ def test_should_propagate_trace_to_sentry( client.transport.parsed_dsn = Dsn(dsn) assert should_propagate_trace(client, url) == expected_propagation_decision - - -def test_start_transaction_updates_scope_name_source(sentry_init): - sentry_init(traces_sample_rate=1.0) - - scope = sentry_sdk.get_current_scope() - - with start_transaction(name="foobar", source="route"): - assert scope._transaction == "foobar" - assert 
scope._transaction_info == {"source": "route"} - - -@pytest.mark.parametrize("sampled", (True, None)) -def test_transaction_dropped_debug_not_started(sentry_init, sampled): - sentry_init(enable_tracing=True) - - tx = Transaction(sampled=sampled) - - with mock.patch("sentry_sdk.tracing.logger") as mock_logger: - with tx: - pass - - mock_logger.debug.assert_any_call( - "Discarding transaction because it was not started with sentry_sdk.start_transaction" - ) - - with pytest.raises(AssertionError): - # We should NOT see the "sampled = False" message here - mock_logger.debug.assert_any_call( - "Discarding transaction because sampled = False" - ) - - -def test_transaction_dropped_sampled_false(sentry_init): - sentry_init(enable_tracing=True) - - tx = Transaction(sampled=False) - - with mock.patch("sentry_sdk.tracing.logger") as mock_logger: - with sentry_sdk.start_transaction(tx): - pass - - mock_logger.debug.assert_any_call("Discarding transaction because sampled = False") - - with pytest.raises(AssertionError): - # We should not see the "not started" message here - mock_logger.debug.assert_any_call( - "Discarding transaction because it was not started with sentry_sdk.start_transaction" - ) - - -def test_transaction_not_started_warning(sentry_init): - sentry_init(enable_tracing=True) - - tx = Transaction() - - with mock.patch("sentry_sdk.tracing.logger") as mock_logger: - with tx: - pass - - mock_logger.debug.assert_any_call( - "Transaction was entered without being started with sentry_sdk.start_transaction." - "The transaction will not be sent to Sentry. To fix, start the transaction by" - "passing it to sentry_sdk.start_transaction." - ) diff --git a/tests/tracing/test_propagation.py b/tests/tracing/test_propagation.py deleted file mode 100644 index 730bf2672b..0000000000 --- a/tests/tracing/test_propagation.py +++ /dev/null @@ -1,40 +0,0 @@ -import sentry_sdk -import pytest - - -def test_standalone_span_iter_headers(sentry_init): - sentry_init(enable_tracing=True) - - with sentry_sdk.start_span(op="test") as span: - with pytest.raises(StopIteration): - # We should not have any propagation headers - next(span.iter_headers()) - - -def test_span_in_span_iter_headers(sentry_init): - sentry_init(enable_tracing=True) - - with sentry_sdk.start_span(op="test"): - with sentry_sdk.start_span(op="test2") as span_inner: - with pytest.raises(StopIteration): - # We should not have any propagation headers - next(span_inner.iter_headers()) - - -def test_span_in_transaction(sentry_init): - sentry_init(enable_tracing=True) - - with sentry_sdk.start_transaction(op="test"): - with sentry_sdk.start_span(op="test2") as span: - # Ensure the headers are there - next(span.iter_headers()) - - -def test_span_in_span_in_transaction(sentry_init): - sentry_init(enable_tracing=True) - - with sentry_sdk.start_transaction(op="test"): - with sentry_sdk.start_span(op="test2"): - with sentry_sdk.start_span(op="test3") as span_inner: - # Ensure the headers are there - next(span_inner.iter_headers()) diff --git a/tests/tracing/test_sampling.py b/tests/tracing/test_sampling.py index 8ef362a1e8..db5a545b5c 100644 --- a/tests/tracing/test_sampling.py +++ b/tests/tracing/test_sampling.py @@ -5,7 +5,7 @@ import pytest import sentry_sdk -from sentry_sdk import start_span, start_transaction, capture_exception +from sentry_sdk import start_span, capture_exception from sentry_sdk.utils import logger @@ -41,26 +41,12 @@ def test_no_double_sampling(sentry_init, capture_events): sentry_init(traces_sample_rate=1.0, sample_rate=0.0) events = 
capture_events() - with start_transaction(name="/"): + with start_span(name="/"): pass assert len(events) == 1 -@pytest.mark.parametrize("sampling_decision", [True, False]) -def test_get_transaction_and_span_from_scope_regardless_of_sampling_decision( - sentry_init, sampling_decision -): - sentry_init(traces_sample_rate=1.0) - - with start_transaction(name="/", sampled=sampling_decision): - with start_span(op="child-span"): - with start_span(op="child-child-span"): - scope = sentry_sdk.get_current_scope() - assert scope.span.op == "child-child-span" - assert scope.transaction.name == "/" - - @pytest.mark.parametrize( "traces_sample_rate,expected_decision", [(0.0, False), (0.25, False), (0.75, True), (1.00, True)], @@ -73,7 +59,7 @@ def test_uses_traces_sample_rate_correctly( sentry_init(traces_sample_rate=traces_sample_rate) with mock.patch.object(random, "random", return_value=0.5): - transaction = start_transaction(name="dogpark") + transaction = start_span(name="dogpark") assert transaction.sampled is expected_decision @@ -89,8 +75,8 @@ def test_uses_traces_sampler_return_value_correctly( sentry_init(traces_sampler=mock.Mock(return_value=traces_sampler_return_value)) with mock.patch.object(random, "random", return_value=0.5): - transaction = start_transaction(name="dogpark") - assert transaction.sampled is expected_decision + with start_span(name="dogpark") as span: + assert span.sampled is expected_decision @pytest.mark.parametrize("traces_sampler_return_value", [True, False]) @@ -99,8 +85,8 @@ def test_tolerates_traces_sampler_returning_a_boolean( ): sentry_init(traces_sampler=mock.Mock(return_value=traces_sampler_return_value)) - transaction = start_transaction(name="dogpark") - assert transaction.sampled is traces_sampler_return_value + with start_span(name="dogpark") as span: + assert span.sampled is traces_sampler_return_value @pytest.mark.parametrize("sampling_decision", [True, False]) @@ -110,8 +96,8 @@ def test_only_captures_transaction_when_sampled_is_true( sentry_init(traces_sampler=mock.Mock(return_value=sampling_decision)) events = capture_events() - transaction = start_transaction(name="dogpark") - transaction.finish() + with start_span(name="dogpark"): + pass assert len(events) == (1 if sampling_decision else 0) @@ -132,9 +118,9 @@ def test_prefers_traces_sampler_to_traces_sample_rate( traces_sampler=traces_sampler, ) - transaction = start_transaction(name="dogpark") - assert traces_sampler.called is True - assert transaction.sampled is traces_sampler_return_value + with start_span(name="dogpark") as span: + assert traces_sampler.called is True + assert span.sampled is traces_sampler_return_value @pytest.mark.parametrize("parent_sampling_decision", [True, False]) @@ -168,8 +154,8 @@ def test_traces_sampler_doesnt_overwrite_explicitly_passed_sampling_decision( traces_sampler = mock.Mock(return_value=not explicit_decision) sentry_init(traces_sampler=traces_sampler) - transaction = start_transaction(name="dogpark", sampled=explicit_decision) - assert transaction.sampled is explicit_decision + with start_span(name="dogpark", sampled=explicit_decision) as span: + assert span.sampled is explicit_decision @pytest.mark.parametrize("parent_sampling_decision", [True, False]) @@ -190,9 +176,7 @@ def test_inherits_parent_sampling_decision_when_traces_sampler_undefined( with mock.patch.object(random, "random", return_value=mock_random_value): with sentry_sdk.continue_trace({"sentry-trace": sentry_trace_header}): with start_span(name="dogpark") as span: - pass - - assert span.sampled 
is parent_sampling_decision + assert span.sampled is parent_sampling_decision @pytest.mark.parametrize("parent_sampling_decision", [True, False]) @@ -222,11 +206,10 @@ def test_passes_attributes_from_start_span_to_traces_sampler( traces_sampler = mock.Mock() sentry_init(traces_sampler=traces_sampler) - start_transaction(attributes={"dogs": "yes", "cats": "maybe"}) - - traces_sampler.assert_any_call( - DictionaryContaining({"dogs": "yes", "cats": "maybe"}) - ) + with start_span(attributes={"dogs": "yes", "cats": "maybe"}): + traces_sampler.assert_any_call( + DictionaryContaining({"dogs": "yes", "cats": "maybe"}) + ) def test_sample_rate_affects_errors(sentry_init, capture_events): @@ -261,9 +244,11 @@ def test_warns_and_sets_sampled_to_false_on_invalid_traces_sampler_return_value( sentry_init(traces_sampler=mock.Mock(return_value=traces_sampler_return_value)) with mock.patch.object(logger, "warning", mock.Mock()): - transaction = start_transaction(name="dogpark") - logger.warning.assert_any_call(StringContaining("Given sample rate is invalid")) - assert transaction.sampled is False + with start_span(name="dogpark") as span: + logger.warning.assert_any_call( + StringContaining("Given sample rate is invalid") + ) + assert span.sampled is False @pytest.mark.parametrize( @@ -288,9 +273,8 @@ def test_records_lost_event_only_if_traces_sample_rate_enabled( sentry_init(traces_sample_rate=traces_sample_rate) record_lost_event_calls = capture_record_lost_event_calls() - transaction = start_transaction(name="dogpark") - assert transaction.sampled is sampled_output - transaction.finish() + with start_span(name="dogpark") as span: + assert span.sampled is sampled_output # Use Counter because order of calls does not matter assert Counter(record_lost_event_calls) == Counter(expected_record_lost_event_calls) @@ -318,9 +302,8 @@ def test_records_lost_event_only_if_traces_sampler_enabled( sentry_init(traces_sampler=traces_sampler) record_lost_event_calls = capture_record_lost_event_calls() - transaction = start_transaction(name="dogpark") - assert transaction.sampled is sampled_output - transaction.finish() + with start_span(name="dogpark") as span: + assert span.sampled is sampled_output # Use Counter because order of calls does not matter assert Counter(record_lost_event_calls) == Counter(expected_record_lost_event_calls) diff --git a/tests/tracing/test_span_name.py b/tests/tracing/test_span_name.py index 9c1768990a..d7d3772727 100644 --- a/tests/tracing/test_span_name.py +++ b/tests/tracing/test_span_name.py @@ -1,27 +1,11 @@ -import pytest - import sentry_sdk -def test_start_span_description(sentry_init, capture_events): - sentry_init(traces_sample_rate=1.0) - events = capture_events() - - with sentry_sdk.start_transaction(name="hi"): - with pytest.deprecated_call(): - with sentry_sdk.start_span(op="foo", description="span-desc"): - ... - - (event,) = events - - assert event["spans"][0]["description"] == "span-desc" - - def test_start_span_name(sentry_init, capture_events): sentry_init(traces_sample_rate=1.0) events = capture_events() - with sentry_sdk.start_transaction(name="hi"): + with sentry_sdk.start_span(name="hi"): with sentry_sdk.start_span(op="foo", name="span-name"): ... 
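The span-naming updates in this file all follow the same migration pattern: the outermost start_span() call now plays the role start_transaction() used to play, and nested start_span() calls create children of it. A minimal sketch of that pattern, with the span names, op value, and data key invented for illustration rather than taken from this patch:

    import sentry_sdk

    sentry_sdk.init(traces_sample_rate=1.0)

    with sentry_sdk.start_span(name="checkout"):  # root span, formerly the transaction
        with sentry_sdk.start_span(op="db", name="load cart") as span:
            span.set_data("cart.items", 3)  # recorded on the child span's data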
@@ -30,26 +14,11 @@ def test_start_span_name(sentry_init, capture_events): assert event["spans"][0]["description"] == "span-name" -def test_start_child_description(sentry_init, capture_events): - sentry_init(traces_sample_rate=1.0) - events = capture_events() - - with sentry_sdk.start_transaction(name="hi"): - with pytest.deprecated_call(): - with sentry_sdk.start_span(op="foo", description="span-desc") as span: - with span.start_child(op="bar", description="child-desc"): - ... - - (event,) = events - - assert event["spans"][-1]["description"] == "child-desc" - - def test_start_child_name(sentry_init, capture_events): sentry_init(traces_sample_rate=1.0) events = capture_events() - with sentry_sdk.start_transaction(name="hi"): + with sentry_sdk.start_span(name="hi"): with sentry_sdk.start_span(op="foo", name="span-name") as span: with span.start_child(op="bar", name="child-name"): ... diff --git a/tests/tracing/test_span_origin.py b/tests/tracing/test_span_origin.py index 16635871b3..649f704b1b 100644 --- a/tests/tracing/test_span_origin.py +++ b/tests/tracing/test_span_origin.py @@ -1,11 +1,11 @@ -from sentry_sdk import start_transaction, start_span +from sentry_sdk import start_span def test_span_origin_manual(sentry_init, capture_events): sentry_init(traces_sample_rate=1.0) events = capture_events() - with start_transaction(name="hi"): + with start_span(name="hi"): with start_span(op="foo", name="bar"): pass @@ -20,11 +20,11 @@ def test_span_origin_custom(sentry_init, capture_events): sentry_init(traces_sample_rate=1.0) events = capture_events() - with start_transaction(name="hi"): + with start_span(name="hi"): with start_span(op="foo", name="bar", origin="foo.foo2.foo3"): pass - with start_transaction(name="ho", origin="ho.ho2.ho3"): + with start_span(name="ho", origin="ho.ho2.ho3"): with start_span(op="baz", name="qux", origin="baz.baz2.baz3"): pass From 0b264d8fcbc53b530216ad56fb2cc2a0657ff53a Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Tue, 19 Nov 2024 14:02:41 +0100 Subject: [PATCH 113/244] Fix test scopes (#3801) --- .../integrations/opentelemetry/integration.py | 6 +++++- sentry_sdk/scope.py | 4 ++-- tests/test_scope.py | 19 +++++++++++++++---- 3 files changed, 22 insertions(+), 7 deletions(-) diff --git a/sentry_sdk/integrations/opentelemetry/integration.py b/sentry_sdk/integrations/opentelemetry/integration.py index 944326a124..016ce91b52 100644 --- a/sentry_sdk/integrations/opentelemetry/integration.py +++ b/sentry_sdk/integrations/opentelemetry/integration.py @@ -44,6 +44,7 @@ def setup_once(): "Use at your own risk." ) + _setup_scope_context_management() _setup_sentry_tracing() _patch_readable_span() # _setup_instrumentors() @@ -68,12 +69,15 @@ def sentry_patched_readable_span(self): Span._readable_span = sentry_patched_readable_span -def _setup_sentry_tracing(): +def _setup_scope_context_management(): # type: () -> None import opentelemetry.context opentelemetry.context._RUNTIME_CONTEXT = SentryContextVarsRuntimeContext() + +def _setup_sentry_tracing(): + # type: () -> None provider = TracerProvider(sampler=SentrySampler()) provider.add_span_processor(PotelSentrySpanProcessor()) trace.set_tracer_provider(provider) diff --git a/sentry_sdk/scope.py b/sentry_sdk/scope.py index 12b6c5aed6..54e6fc8928 100644 --- a/sentry_sdk/scope.py +++ b/sentry_sdk/scope.py @@ -376,7 +376,7 @@ def get_client(cls): This checks the current scope, the isolation scope and the global scope for a client. If no client is available a :py:class:`sentry_sdk.client.NonRecordingClient` is returned. 
""" - current_scope = cls._get_current_scope() + current_scope = cls.get_current_scope() try: client = current_scope.client except AttributeError: @@ -385,7 +385,7 @@ def get_client(cls): if client is not None and client.is_active(): return client - isolation_scope = cls._get_isolation_scope() + isolation_scope = cls.get_isolation_scope() try: client = isolation_scope.client except AttributeError: diff --git a/tests/test_scope.py b/tests/test_scope.py index 0dfa155d11..8b89d755b8 100644 --- a/tests/test_scope.py +++ b/tests/test_scope.py @@ -11,18 +11,29 @@ ) from sentry_sdk.client import Client, NonRecordingClient from sentry_sdk.scope import ( - Scope, + Scope as BaseScope, ScopeType, - use_isolation_scope, - use_scope, should_send_default_pii, ) +from sentry_sdk.integrations.opentelemetry.integration import ( + _setup_scope_context_management, +) +from sentry_sdk.integrations.opentelemetry.scope import ( + PotelScope as Scope, + use_scope, + use_isolation_scope, +) SLOTS_NOT_COPIED = {"client"} """__slots__ that are not copied when copying a Scope object.""" +@pytest.fixture(autouse=True) +def setup_otel_scope_management(): + _setup_scope_context_management() + + def test_copying(): s1 = Scope() s1.fingerprint = {} @@ -212,7 +223,7 @@ def test_get_isolation_scope(): def test_get_global_scope(): scope = Scope.get_global_scope() assert scope is not None - assert scope.__class__ == Scope + assert scope.__class__ == BaseScope assert scope._type == ScopeType.GLOBAL From f2ce1808fe42b8e1871450a764868dbec6ecae7c Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Tue, 19 Nov 2024 14:16:25 +0100 Subject: [PATCH 114/244] Fix starlette status tests --- tests/integrations/starlette/test_starlette.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/tests/integrations/starlette/test_starlette.py b/tests/integrations/starlette/test_starlette.py index a45c900f12..1380b2b386 100644 --- a/tests/integrations/starlette/test_starlette.py +++ b/tests/integrations/starlette/test_starlette.py @@ -18,6 +18,7 @@ StarletteRequestExtractor, ) from sentry_sdk.utils import parse_version +from tests.conftest import ApproxDict import starlette from starlette.authentication import ( @@ -765,7 +766,7 @@ def test_middleware_callback_spans(sentry_init, capture_events): for span in transaction_event["spans"]: assert span["op"] == expected[idx]["op"] assert span["description"] == expected[idx]["description"] - assert span["tags"] == expected[idx]["tags"] + assert span["tags"] == ApproxDict(expected[idx]["tags"]) idx += 1 @@ -856,7 +857,7 @@ def test_middleware_partial_receive_send(sentry_init, capture_events): for span in transaction_event["spans"]: assert span["op"] == expected[idx]["op"] assert span["description"].startswith(expected[idx]["description"]) - assert span["tags"] == expected[idx]["tags"] + assert span["tags"] == ApproxDict(expected[idx]["tags"]) idx += 1 From f261f280e23a3c2a0065ac525c2b2a2e71ac779d Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Tue, 19 Nov 2024 14:17:24 +0100 Subject: [PATCH 115/244] Fix deferred tracing decisions for twp (#3802) --- .../integrations/opentelemetry/sampler.py | 11 ++- .../integrations/opentelemetry/scope.py | 11 ++- sentry_sdk/tracing.py | 2 + .../integrations/opentelemetry/test_compat.py | 6 +- .../opentelemetry/test_experimental.py | 47 ---------- .../integrations/opentelemetry/test_potel.py | 36 ++++---- .../opentelemetry/test_sampler.py | 85 ++++++++----------- 7 files changed, 78 insertions(+), 120 deletions(-) delete mode 100644 
tests/integrations/opentelemetry/test_experimental.py diff --git a/sentry_sdk/integrations/opentelemetry/sampler.py b/sentry_sdk/integrations/opentelemetry/sampler.py index 5fa41d28fc..0631c0b19e 100644 --- a/sentry_sdk/integrations/opentelemetry/sampler.py +++ b/sentry_sdk/integrations/opentelemetry/sampler.py @@ -34,10 +34,14 @@ def get_parent_sampled(parent_context, trace_id): # Only inherit sample rate if `traceId` is the same if is_span_context_valid and parent_context.trace_id == trace_id: # this is getSamplingDecision in JS + # if there was no sampling flag, defer the decision + dsc_sampled = parent_context.trace_state.get(TRACESTATE_SAMPLED_KEY) + if dsc_sampled == "deferred": + return None + if parent_context.trace_flags.sampled is not None: return parent_context.trace_flags.sampled - dsc_sampled = parent_context.trace_state.get(TRACESTATE_SAMPLED_KEY) if dsc_sampled == "true": return True elif dsc_sampled == "false": @@ -53,6 +57,8 @@ def dropped_result(parent_span_context, attributes, sample_rate=None): if TRACESTATE_SAMPLED_KEY not in trace_state: trace_state = trace_state.add(TRACESTATE_SAMPLED_KEY, "false") + elif trace_state.get(TRACESTATE_SAMPLED_KEY) == "deferred": + trace_state = trace_state.update(TRACESTATE_SAMPLED_KEY, "false") if sample_rate and TRACESTATE_SAMPLE_RATE_KEY not in trace_state: trace_state = trace_state.add(TRACESTATE_SAMPLE_RATE_KEY, str(sample_rate)) @@ -88,6 +94,9 @@ def sampled_result(span_context, attributes, sample_rate): if TRACESTATE_SAMPLED_KEY not in trace_state: trace_state = trace_state.add(TRACESTATE_SAMPLED_KEY, "true") + elif trace_state.get(TRACESTATE_SAMPLED_KEY) == "deferred": + trace_state = trace_state.update(TRACESTATE_SAMPLED_KEY, "true") + if TRACESTATE_SAMPLE_RATE_KEY not in trace_state: trace_state = trace_state.add(TRACESTATE_SAMPLE_RATE_KEY, str(sample_rate)) diff --git a/sentry_sdk/integrations/opentelemetry/scope.py b/sentry_sdk/integrations/opentelemetry/scope.py index 56df9a774a..f6df844109 100644 --- a/sentry_sdk/integrations/opentelemetry/scope.py +++ b/sentry_sdk/integrations/opentelemetry/scope.py @@ -6,6 +6,7 @@ SpanContext, NonRecordingSpan, TraceFlags, + TraceState, use_span, ) @@ -14,6 +15,7 @@ SENTRY_FORK_ISOLATION_SCOPE_KEY, SENTRY_USE_CURRENT_SCOPE_KEY, SENTRY_USE_ISOLATION_SCOPE_KEY, + TRACESTATE_SAMPLED_KEY, ) from sentry_sdk.integrations.opentelemetry.utils import trace_state_from_baggage from sentry_sdk.scope import Scope, ScopeType @@ -96,10 +98,15 @@ def _incoming_otel_span_context(self): else TraceFlags.DEFAULT ) - # TODO-neel-potel do we need parent and sampled like JS? 
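The sampler changes above treat a "deferred" entry in the trace state as "no decision yet", and the scope change just below writes that entry whenever an incoming trace arrives without a sampled flag (parent_sampled is None), so the decision can be resolved locally. A minimal sketch of the scenario this enables, reusing the trace and span ids from the sampler tests later in this patch (the span name is illustrative):

    import sentry_sdk

    sentry_sdk.init(traces_sample_rate=1.0)

    # No trailing sampled flag on the sentry-trace header, so sampling is
    # deferred and resolved by this SDK's own configuration.
    incoming = {"sentry-trace": "771a43a4192642f0b136d5159a501700-1234567890abcdef"}
    with sentry_sdk.continue_trace(incoming):
        with sentry_sdk.start_span(name="handle request"):
            ...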
- trace_state = None if self._propagation_context.baggage: trace_state = trace_state_from_baggage(self._propagation_context.baggage) + else: + trace_state = TraceState() + + # for twp to work, we also need to consider deferred sampling when the sampling + # flag is not present, so the above TraceFlags are not sufficient + if self._propagation_context.parent_sampled is None: + trace_state = trace_state.add(TRACESTATE_SAMPLED_KEY, "deferred") span_context = SpanContext( trace_id=int(self._propagation_context.trace_id, 16), # type: ignore diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 18b18ba8ef..56816c1328 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -1311,6 +1311,8 @@ def __exit__(self, ty, value, tb): # type: (Optional[Any], Optional[Any], Optional[Any]) -> None if value is not None: self.set_status(SPANSTATUS.INTERNAL_ERROR) + else: + self.set_status(SPANSTATUS.OK) self.finish() context.detach(self._ctx_token) diff --git a/tests/integrations/opentelemetry/test_compat.py b/tests/integrations/opentelemetry/test_compat.py index ece08ec900..f2292d9ff2 100644 --- a/tests/integrations/opentelemetry/test_compat.py +++ b/tests/integrations/opentelemetry/test_compat.py @@ -9,7 +9,7 @@ def test_transaction_name_span_description_compat( events = capture_events() - with sentry_sdk.start_transaction( + with sentry_sdk.start_span( name="trx-name", op="trx-op", ) as trx: @@ -33,13 +33,12 @@ def test_transaction_name_span_description_compat( assert spn.__class__.__name__ == "POTelSpan" assert spn.op == "span-op" assert spn.description == "span-desc" - assert spn.name is None + assert spn.name == "span-desc" assert spn._otel_span is not None assert spn._otel_span.name == "span-desc" assert spn._otel_span.attributes["sentry.op"] == "span-op" assert spn._otel_span.attributes["sentry.description"] == "span-desc" - assert "sentry.name" not in spn._otel_span.attributes transaction = events[0] assert transaction["transaction"] == "trx-name" @@ -53,4 +52,3 @@ def test_transaction_name_span_description_compat( assert span["op"] == "span-op" assert span["data"]["sentry.op"] == "span-op" assert span["data"]["sentry.description"] == "span-desc" - assert "sentry.name" not in span["data"] diff --git a/tests/integrations/opentelemetry/test_experimental.py b/tests/integrations/opentelemetry/test_experimental.py deleted file mode 100644 index 8e4b703361..0000000000 --- a/tests/integrations/opentelemetry/test_experimental.py +++ /dev/null @@ -1,47 +0,0 @@ -from unittest.mock import MagicMock, patch - -import pytest - - -@pytest.mark.forked -def test_integration_enabled_if_option_is_on(sentry_init, reset_integrations): - mocked_setup_once = MagicMock() - - with patch( - "sentry_sdk.integrations.opentelemetry.integration.OpenTelemetryIntegration.setup_once", - mocked_setup_once, - ): - sentry_init( - _experiments={ - "otel_powered_performance": True, - }, - ) - mocked_setup_once.assert_called_once() - - -@pytest.mark.forked -def test_integration_not_enabled_if_option_is_off(sentry_init, reset_integrations): - mocked_setup_once = MagicMock() - - with patch( - "sentry_sdk.integrations.opentelemetry.integration.OpenTelemetryIntegration.setup_once", - mocked_setup_once, - ): - sentry_init( - _experiments={ - "otel_powered_performance": False, - }, - ) - mocked_setup_once.assert_not_called() - - -@pytest.mark.forked -def test_integration_not_enabled_if_option_is_missing(sentry_init, reset_integrations): - mocked_setup_once = MagicMock() - - with patch( - 
"sentry_sdk.integrations.opentelemetry.integration.OpenTelemetryIntegration.setup_once", - mocked_setup_once, - ): - sentry_init() - mocked_setup_once.assert_not_called() diff --git a/tests/integrations/opentelemetry/test_potel.py b/tests/integrations/opentelemetry/test_potel.py index 2b972addd1..39c48f8cc8 100644 --- a/tests/integrations/opentelemetry/test_potel.py +++ b/tests/integrations/opentelemetry/test_potel.py @@ -1,8 +1,8 @@ import pytest - from opentelemetry import trace import sentry_sdk +from tests.conftest import ApproxDict tracer = trace.get_tracer(__name__) @@ -43,7 +43,6 @@ def test_root_span_transaction_payload_started_with_otel_only(capture_envelopes) assert "span_id" in trace_context assert trace_context["origin"] == "manual" assert trace_context["op"] == "request" - assert trace_context["status"] == "ok" assert payload["spans"] == [] @@ -63,7 +62,6 @@ def test_child_span_payload_started_with_otel_only(capture_envelopes): assert span["op"] == "db" assert span["description"] == "db" assert span["origin"] == "manual" - assert span["status"] == "ok" assert span["span_id"] is not None assert span["trace_id"] == payload["contexts"]["trace"]["trace_id"] assert span["parent_span_id"] == payload["contexts"]["trace"]["span_id"] @@ -222,8 +220,8 @@ def test_span_attributes_in_data_started_with_otel(capture_envelopes): (item,) = envelope.items payload = item.payload.json - assert payload["contexts"]["trace"]["data"] == {"foo": "bar", "baz": 42} - assert payload["spans"][0]["data"] == {"abc": 99, "def": "moo"} + assert payload["contexts"]["trace"]["data"] == ApproxDict({"foo": "bar", "baz": 42}) + assert payload["spans"][0]["data"] == ApproxDict({"abc": 99, "def": "moo"}) def test_span_data_started_with_sentry(capture_envelopes): @@ -238,18 +236,22 @@ def test_span_data_started_with_sentry(capture_envelopes): (item,) = envelope.items payload = item.payload.json - assert payload["contexts"]["trace"]["data"] == { - "foo": "bar", - "sentry.origin": "manual", - "sentry.description": "request", - "sentry.op": "http", - } - assert payload["spans"][0]["data"] == { - "baz": 42, - "sentry.origin": "manual", - "sentry.description": "statement", - "sentry.op": "db", - } + assert payload["contexts"]["trace"]["data"] == ApproxDict( + { + "foo": "bar", + "sentry.origin": "manual", + "sentry.description": "request", + "sentry.op": "http", + } + ) + assert payload["spans"][0]["data"] == ApproxDict( + { + "baz": 42, + "sentry.origin": "manual", + "sentry.description": "statement", + "sentry.op": "db", + } + ) def test_transaction_tags_started_with_otel(capture_envelopes): diff --git a/tests/integrations/opentelemetry/test_sampler.py b/tests/integrations/opentelemetry/test_sampler.py index dfd4981ecf..9e67eb7921 100644 --- a/tests/integrations/opentelemetry/test_sampler.py +++ b/tests/integrations/opentelemetry/test_sampler.py @@ -71,17 +71,13 @@ def test_sampling_traces_sample_rate_50(sentry_init, capture_envelopes): envelopes = capture_envelopes() - with mock.patch( - "sentry_sdk.integrations.opentelemetry.sampler.random", return_value=0.2 - ): # drop + with mock.patch("random.random", return_value=0.2): # drop with sentry_sdk.start_span(description="request a"): with sentry_sdk.start_span(description="cache a"): with sentry_sdk.start_span(description="db a"): ... 
- with mock.patch( - "sentry_sdk.integrations.opentelemetry.sampler.random", return_value=0.7 - ): # keep + with mock.patch("random.random", return_value=0.7): # keep with sentry_sdk.start_span(description="request b"): with sentry_sdk.start_span(description="cache b"): with sentry_sdk.start_span(description="db b"): @@ -101,46 +97,34 @@ def test_sampling_traces_sample_rate_50(sentry_init, capture_envelopes): def test_sampling_traces_sampler(sentry_init, capture_envelopes): def keep_only_a(sampling_context): if " a" in sampling_context["transaction_context"]["name"]: - return 0.05 + return 1 else: return 0 - sentry_init( - traces_sample_rate=1.0, - traces_sampler=keep_only_a, - ) + sentry_init(traces_sampler=keep_only_a) envelopes = capture_envelopes() - # Make sure random() always returns the same values - with mock.patch( - "sentry_sdk.integrations.opentelemetry.sampler.random", - side_effect=[0.04 for _ in range(12)], - ): - - with sentry_sdk.start_span(description="request a"): # keep - with sentry_sdk.start_span(description="cache a"): # keep - with sentry_sdk.start_span(description="db a"): # keep - ... + # children inherit from root spans + with sentry_sdk.start_span(description="request a"): # keep + with sentry_sdk.start_span(description="cache a"): + with sentry_sdk.start_span(description="db a"): + ... - with sentry_sdk.start_span(description="request b"): # drop - with sentry_sdk.start_span(description="cache b"): # drop - with sentry_sdk.start_span(description="db b"): # drop - ... + with sentry_sdk.start_span(description="request b"): # drop + with sentry_sdk.start_span(description="cache b"): + with sentry_sdk.start_span(description="db b"): + ... - with sentry_sdk.start_span(description="request c"): # drop - with sentry_sdk.start_span( - description="cache a c" - ): # keep (but trx dropped, so not collected) - with sentry_sdk.start_span( - description="db a c" - ): # keep (but trx dropped, so not collected) - ... + with sentry_sdk.start_span(description="request c"): # drop + with sentry_sdk.start_span(description="cache a c"): + with sentry_sdk.start_span(description="db a c"): + ... - with sentry_sdk.start_span(description="new a c"): # keep - with sentry_sdk.start_span(description="cache c"): # drop - with sentry_sdk.start_span(description="db c"): # drop - ... + with sentry_sdk.start_span(description="new a c"): # keep + with sentry_sdk.start_span(description="cache c"): + with sentry_sdk.start_span(description="db c"): + ... assert len(envelopes) == 2 (envelope1, envelope2) = envelopes @@ -150,7 +134,7 @@ def keep_only_a(sampling_context): assert transaction1["transaction"] == "request a" assert len(transaction1["spans"]) == 2 assert transaction2["transaction"] == "new a c" - assert len(transaction2["spans"]) == 0 + assert len(transaction2["spans"]) == 2 def test_sampling_traces_sampler_boolean(sentry_init, capture_envelopes): @@ -168,13 +152,13 @@ def keep_only_a(sampling_context): envelopes = capture_envelopes() with sentry_sdk.start_span(description="request a"): # keep - with sentry_sdk.start_span(description="cache a"): # keep - with sentry_sdk.start_span(description="db X"): # drop + with sentry_sdk.start_span(description="cache a"): + with sentry_sdk.start_span(description="db X"): ... 
with sentry_sdk.start_span(description="request b"): # drop - with sentry_sdk.start_span(description="cache b"): # drop - with sentry_sdk.start_span(description="db b"): # drop + with sentry_sdk.start_span(description="cache b"): + with sentry_sdk.start_span(description="db b"): ... assert len(envelopes) == 1 @@ -182,7 +166,7 @@ def keep_only_a(sampling_context): transaction = envelope.items[0].payload.json assert transaction["transaction"] == "request a" - assert len(transaction["spans"]) == 1 + assert len(transaction["spans"]) == 2 @pytest.mark.parametrize( @@ -237,21 +221,24 @@ def test_sampling_parent_sampled( @pytest.mark.parametrize( - "traces_sample_rate, expected_num_of_envelopes", + "traces_sample_rate, upstream_sampled, expected_num_of_envelopes", [ # special case for testing, do not pass any traces_sample_rate to init() (the default traces_sample_rate=None will be used) - (-1, 0), + (-1, 0, 0), # traces_sample_rate=None means do not create new traces, and also do not continue incoming traces. So, no envelopes at all. - (None, 0), + (None, 1, 0), # traces_sample_rate=0 means do not create new traces (0% of the requests), but continue incoming traces. So envelopes will be created only if there is an incoming trace. - (0, 0), + (0, 0, 0), + (0, 1, 1), # traces_sample_rate=1 means create new traces for 100% of requests (and also continue incoming traces, of course). - (1, 1), + (1, 0, 0), + (1, 1, 1), ], ) def test_sampling_parent_dropped( sentry_init, traces_sample_rate, + upstream_sampled, expected_num_of_envelopes, capture_envelopes, ): @@ -265,7 +252,7 @@ def test_sampling_parent_dropped( # The upstream service has dropped the request headers = { - "sentry-trace": "771a43a4192642f0b136d5159a501700-1234567890abcdef-0", + "sentry-trace": f"771a43a4192642f0b136d5159a501700-1234567890abcdef-{upstream_sampled}", } with sentry_sdk.continue_trace(headers): with sentry_sdk.start_span(description="request a"): From 0057c3eb9ce82196a6e3c4d0cddd1c1676c1fb35 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Tue, 19 Nov 2024 14:46:39 +0100 Subject: [PATCH 116/244] Fix litestar approxdict --- tests/integrations/litestar/test_litestar.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/tests/integrations/litestar/test_litestar.py b/tests/integrations/litestar/test_litestar.py index 90346537a7..d9f98c38de 100644 --- a/tests/integrations/litestar/test_litestar.py +++ b/tests/integrations/litestar/test_litestar.py @@ -5,6 +5,7 @@ from sentry_sdk import capture_message from sentry_sdk.integrations.litestar import LitestarIntegration +from tests.conftest import ApproxDict from typing import Any @@ -202,7 +203,7 @@ def is_matching_span(expected_span, actual_span): return ( expected_span["op"] == actual_span["op"] and expected_span["description"] == actual_span["description"] - and expected_span["tags"] == actual_span["tags"] + and ApproxDict(expected_span["tags"]) == actual_span["tags"] ) actual_litestar_spans = list( @@ -298,7 +299,7 @@ def is_matching_span(expected_span, actual_span): return ( expected_span["op"] == actual_span["op"] and actual_span["description"].startswith(expected_span["description"]) - and expected_span["tags"] == actual_span["tags"] + and ApproxDict(expected_span["tags"]) == actual_span["tags"] ) actual_litestar_spans = list( From c8c205cfb5ab23527fb48bb255d3e8fe5e11c072 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Tue, 19 Nov 2024 14:58:10 +0100 Subject: [PATCH 117/244] Fix starlite approxdict --- .gitignore | 1 + 
tests/integrations/starlite/test_starlite.py | 5 +++-- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/.gitignore b/.gitignore index 8c7a5f2174..c9724e80d5 100644 --- a/.gitignore +++ b/.gitignore @@ -4,6 +4,7 @@ *.db *.pid .python-version +.tool-versions .coverage .coverage-sentry* coverage.xml diff --git a/tests/integrations/starlite/test_starlite.py b/tests/integrations/starlite/test_starlite.py index 2c3aa704f5..79fa755608 100644 --- a/tests/integrations/starlite/test_starlite.py +++ b/tests/integrations/starlite/test_starlite.py @@ -5,6 +5,7 @@ from sentry_sdk import capture_message from sentry_sdk.integrations.starlite import StarliteIntegration +from tests.conftest import ApproxDict from typing import Any, Dict @@ -199,7 +200,7 @@ def is_matching_span(expected_span, actual_span): return ( expected_span["op"] == actual_span["op"] and expected_span["description"] == actual_span["description"] - and expected_span["tags"] == actual_span["tags"] + and ApproxDict(expected_span["tags"]) == actual_span["tags"] ) actual_starlite_spans = list( @@ -295,7 +296,7 @@ def is_matching_span(expected_span, actual_span): return ( expected_span["op"] == actual_span["op"] and actual_span["description"].startswith(expected_span["description"]) - and expected_span["tags"] == actual_span["tags"] + and ApproxDict(expected_span["tags"]) == actual_span["tags"] ) actual_starlite_spans = list( From cae5a322a1dff1641ee4afdd8d7d93d4f258d0ed Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Tue, 19 Nov 2024 18:25:49 +0100 Subject: [PATCH 118/244] Fix asyncpg breadcrumb serialization (#3809) --- tests/integrations/asyncpg/test_asyncpg.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/integrations/asyncpg/test_asyncpg.py b/tests/integrations/asyncpg/test_asyncpg.py index aae80770bf..4604557a4a 100644 --- a/tests/integrations/asyncpg/test_asyncpg.py +++ b/tests/integrations/asyncpg/test_asyncpg.py @@ -226,7 +226,7 @@ async def test_record_params(sentry_init, capture_events) -> None: { "category": "query", "data": { - "db.params": "('Bob', 'secret_pw', datetime.date(1984, 3, 1))", + "db.params": ["Bob", "secret_pw", "datetime.date(1984, 3, 1)"], "db.paramstyle": "format", }, "message": "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)", From 83871a061ef362704c68f2edd055ad2ffc464ce5 Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Wed, 20 Nov 2024 09:12:25 +0100 Subject: [PATCH 119/244] Fix clickhouse breadcrumbs (#3687) --- sentry_sdk/integrations/clickhouse_driver.py | 85 +++++++++---------- .../test_clickhouse_driver.py | 24 +++--- 2 files changed, 53 insertions(+), 56 deletions(-) diff --git a/sentry_sdk/integrations/clickhouse_driver.py b/sentry_sdk/integrations/clickhouse_driver.py index 014db14b68..0c83a30824 100644 --- a/sentry_sdk/integrations/clickhouse_driver.py +++ b/sentry_sdk/integrations/clickhouse_driver.py @@ -9,7 +9,7 @@ ensure_integration_enabled, ) -from typing import TYPE_CHECKING, TypeVar +from typing import TYPE_CHECKING, Any, Dict, TypeVar # Hack to get new Python features working in older versions # without introducing a hard dependency on `typing_extensions` @@ -94,17 +94,17 @@ def _inner(*args: P.args, **kwargs: P.kwargs) -> T: connection._sentry_span = span # type: ignore[attr-defined] - _set_db_data(span, connection) - - if should_send_default_pii(): - span.set_attribute("db.query.text", query) + data = _get_db_data(connection) + data["db.query.text"] = query if query_id: - span.set_attribute("db.query_id", query_id) + data["db.query_id"] = 
query_id if params and should_send_default_pii(): - connection._sentry_db_params = params - span.set_attribute("db.params", _serialize_span_attribute(params)) + data["db.params"] = params + + connection._sentry_db_data = data # type: ignore[attr-defined] + _set_on_span(span, data) # run the original code ret = f(*args, **kwargs) @@ -117,34 +117,31 @@ def _inner(*args: P.args, **kwargs: P.kwargs) -> T: def _wrap_end(f: Callable[P, T]) -> Callable[P, T]: def _inner_end(*args: P.args, **kwargs: P.kwargs) -> T: res = f(*args, **kwargs) - instance = args[0] - span = getattr(instance.connection, "_sentry_span", None) # type: ignore[attr-defined] + connection = args[0].connection + span = getattr(connection, "_sentry_span", None) # type: ignore[attr-defined] if span is not None: + data = getattr(connection, "_sentry_db_data", {}) + if res is not None and should_send_default_pii(): + data["db.result"] = res span.set_attribute("db.result", _serialize_span_attribute(res)) with capture_internal_exceptions(): - query = span.get_attribute("db.query.text") + query = data.pop("db.query.text", None) if query: - data = {} - for attr in ( - "db.params", - "db.result", - SPANDATA.DB_SYSTEM, - SPANDATA.DB_USER, - SPANDATA.SERVER_ADDRESS, - SPANDATA.SERVER_PORT, - ): - if span.get_attribute(attr): - data[attr] = span.get_attribute(attr) - sentry_sdk.add_breadcrumb( message=query, category="query", data=data ) span.finish() + try: + del connection._sentry_db_data + del connection._sentry_span + except AttributeError: + pass + return res return _inner_end @@ -152,34 +149,36 @@ def _inner_end(*args: P.args, **kwargs: P.kwargs) -> T: def _wrap_send_data(f: Callable[P, T]) -> Callable[P, T]: def _inner_send_data(*args: P.args, **kwargs: P.kwargs) -> T: - instance = args[0] # type: clickhouse_driver.client.Client - data = args[2] - span = getattr(instance.connection, "_sentry_span", None) + connection = args[0].connection + db_params_data = args[2] + span = getattr(connection, "_sentry_span", None) if span is not None: - _set_db_data(span, instance.connection) + data = _get_db_data(connection) + _set_on_span(span, data) if should_send_default_pii(): - db_params = ( - getattr(instance.connection, "_sentry_db_params", None) or [] - ) - db_params.extend(data) + saved_db_data = getattr(connection, "_sentry_db_data", {}) + db_params = saved_db_data.get("db.params") or [] + db_params.extend(db_params_data) + saved_db_data["db.params"] = db_params span.set_attribute("db.params", _serialize_span_attribute(db_params)) - try: - del instance.connection._sentry_db_params - except AttributeError: - pass return f(*args, **kwargs) return _inner_send_data -def _set_db_data( - span: Span, connection: clickhouse_driver.connection.Connection -) -> None: - span.set_attribute(SPANDATA.DB_SYSTEM, "clickhouse") - span.set_attribute(SPANDATA.SERVER_ADDRESS, connection.host) - span.set_attribute(SPANDATA.SERVER_PORT, connection.port) - span.set_attribute(SPANDATA.DB_NAME, connection.database) - span.set_attribute(SPANDATA.DB_USER, connection.user) +def _get_db_data(connection: clickhouse_driver.connection.Connection) -> Dict[str, str]: + return { + SPANDATA.DB_SYSTEM: "clickhouse", + SPANDATA.SERVER_ADDRESS: connection.host, + SPANDATA.SERVER_PORT: connection.port, + SPANDATA.DB_NAME: connection.database, + SPANDATA.DB_USER: connection.user, + } + + +def _set_on_span(span: Span, data: Dict[str, Any]): + for key, value in data.items(): + span.set_attribute(key, _serialize_span_attribute(value)) diff --git 
a/tests/integrations/clickhouse_driver/test_clickhouse_driver.py b/tests/integrations/clickhouse_driver/test_clickhouse_driver.py index 5da77ce13d..381cbaafd1 100644 --- a/tests/integrations/clickhouse_driver/test_clickhouse_driver.py +++ b/tests/integrations/clickhouse_driver/test_clickhouse_driver.py @@ -168,7 +168,7 @@ def test_clickhouse_client_breadcrumbs_with_pii(sentry_init, capture_events) -> "db.user": "default", "server.address": "localhost", "server.port": 9000, - "db.params": '[{"x": 100}]', + "db.params": [{"x": 100}], }, "message": "INSERT INTO test (x) VALUES", "type": "default", @@ -181,7 +181,7 @@ def test_clickhouse_client_breadcrumbs_with_pii(sentry_init, capture_events) -> "db.user": "default", "server.address": "localhost", "server.port": 9000, - "db.params": "[[170], [200]]", + "db.params": [[170], [200]], }, "message": "INSERT INTO test (x) VALUES", "type": "default", @@ -194,8 +194,8 @@ def test_clickhouse_client_breadcrumbs_with_pii(sentry_init, capture_events) -> "db.user": "default", "server.address": "localhost", "server.port": 9000, - "db.result": "[[370]]", - "db.params": '{"minv": 150}', + "db.result": [[370]], + "db.params": {"minv": 150}, }, "message": "SELECT sum(x) FROM test WHERE x > 150", "type": "default", @@ -348,9 +348,7 @@ def test_clickhouse_client_spans( assert event["spans"] == expected_spans -def test_clickhouse_client_spans_with_pii( - sentry_init, capture_events, capture_envelopes -) -> None: +def test_clickhouse_client_spans_with_pii(sentry_init, capture_events) -> None: sentry_init( integrations=[ClickhouseDriverIntegration()], _experiments={"record_sql_params": True}, @@ -620,7 +618,7 @@ def test_clickhouse_dbapi_breadcrumbs_with_pii(sentry_init, capture_events) -> N "db.user": "default", "server.address": "localhost", "server.port": 9000, - "db.result": "[[], []]", + "db.result": [[], []], }, "message": "DROP TABLE IF EXISTS test", "type": "default", @@ -633,7 +631,7 @@ def test_clickhouse_dbapi_breadcrumbs_with_pii(sentry_init, capture_events) -> N "db.user": "default", "server.address": "localhost", "server.port": 9000, - "db.result": "[[], []]", + "db.result": [[], []], }, "message": "CREATE TABLE test (x Int32) ENGINE = Memory", "type": "default", @@ -646,7 +644,7 @@ def test_clickhouse_dbapi_breadcrumbs_with_pii(sentry_init, capture_events) -> N "db.user": "default", "server.address": "localhost", "server.port": 9000, - "db.params": '[{"x": 100}]', + "db.params": [{"x": 100}], }, "message": "INSERT INTO test (x) VALUES", "type": "default", @@ -659,7 +657,7 @@ def test_clickhouse_dbapi_breadcrumbs_with_pii(sentry_init, capture_events) -> N "db.user": "default", "server.address": "localhost", "server.port": 9000, - "db.params": "[[170], [200]]", + "db.params": [[170], [200]], }, "message": "INSERT INTO test (x) VALUES", "type": "default", @@ -672,8 +670,8 @@ def test_clickhouse_dbapi_breadcrumbs_with_pii(sentry_init, capture_events) -> N "db.user": "default", "server.address": "localhost", "server.port": 9000, - "db.params": '{"minv": 150}', - "db.result": '[[["370"]], [["\'sum(x)\'", "\'Int64\'"]]]', + "db.params": {"minv": 150}, + "db.result": [[["370"]], [["'sum(x)'", "'Int64'"]]], }, "message": "SELECT sum(x) FROM test WHERE x > 150", "type": "default", From 25d311eaa094fd1ee182f87e239a108eeeb0e1a6 Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Thu, 21 Nov 2024 16:51:38 +0100 Subject: [PATCH 120/244] Extract span attrs from RQ job object & fix tests (#3786) --- MIGRATION_GUIDE.md | 12 +++++ 
.../opentelemetry/potel_span_processor.py | 4 +- .../integrations/opentelemetry/utils.py | 17 +++++-- sentry_sdk/integrations/rq.py | 45 +++++++++++++++++-- tests/integrations/rq/test_rq.py | 41 +++++++---------- 5 files changed, 85 insertions(+), 34 deletions(-) diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index 5d0777c22a..215dd4e5a1 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -71,6 +71,18 @@ Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of wh | `client` | `client.address`, `client.port` | | full URL | `url.full` | +- If you're using the RQ integration, the `sampling_context` argument of `traces_sampler` doesn't contain the `rq_job` object anymore. Instead, the individual properties of the scope, if available, are accessible as follows: + + | RQ property | Sampling context key(s) | + | --------------- | ---------------------------- | + | `rq_job.args` | `rq.job.args` | + | `rq_job.kwargs` | `rq.job.kwargs` | + | `rq_job.func` | `rq.job.func` | + | `queue.name` | `messaging.destination.name` | + | `job.id` | `messaging.message.id` | + + Note that `rq.job.args`, `rq.job.kwargs`, and `rq.job.func` are serialized and not the actual objects on the job. + ### Removed - Spans no longer have a `description`. Use `name` instead. diff --git a/sentry_sdk/integrations/opentelemetry/potel_span_processor.py b/sentry_sdk/integrations/opentelemetry/potel_span_processor.py index 1736fcd25e..14636b9e37 100644 --- a/sentry_sdk/integrations/opentelemetry/potel_span_processor.py +++ b/sentry_sdk/integrations/opentelemetry/potel_span_processor.py @@ -179,8 +179,6 @@ def _root_span_to_transaction_event(self, span): transaction_name, transaction_source = extract_transaction_name_source(span) span_data = extract_span_data(span) - (_, description, status, http_status, _) = span_data - trace_context = get_trace_context(span, span_data=span_data) contexts = {"trace": trace_context} @@ -188,6 +186,8 @@ def _root_span_to_transaction_event(self, span): if profile_context: contexts["profile"] = profile_context + (_, description, _, http_status, _) = span_data + if http_status: contexts["response"] = {"status_code": http_status} diff --git a/sentry_sdk/integrations/opentelemetry/utils.py b/sentry_sdk/integrations/opentelemetry/utils.py index 6127ceba5c..673b334318 100644 --- a/sentry_sdk/integrations/opentelemetry/utils.py +++ b/sentry_sdk/integrations/opentelemetry/utils.py @@ -114,7 +114,6 @@ def extract_span_data(span): description = span.name status, http_status = extract_span_status(span) origin = None - if span.attributes is None: return (op, description, status, http_status, origin) @@ -133,11 +132,23 @@ def extract_span_data(span): rpc_service = span.attributes.get(SpanAttributes.RPC_SERVICE) if rpc_service: - return ("rpc", description, status, http_status, origin) + return ( + span.attributes.get(SentrySpanAttribute.OP) or "rpc", + description, + status, + http_status, + origin, + ) messaging_system = span.attributes.get(SpanAttributes.MESSAGING_SYSTEM) if messaging_system: - return ("message", description, status, http_status, origin) + return ( + span.attributes.get(SentrySpanAttribute.OP) or "message", + description, + status, + http_status, + origin, + ) faas_trigger = span.attributes.get(SpanAttributes.FAAS_TRIGGER) if faas_trigger: diff --git a/sentry_sdk/integrations/rq.py b/sentry_sdk/integrations/rq.py index e1f8982053..b097b253ce 100644 --- a/sentry_sdk/integrations/rq.py +++ b/sentry_sdk/integrations/rq.py @@ -6,6 +6,7 @@ from 
sentry_sdk.integrations.logging import ignore_logger from sentry_sdk.tracing import TRANSACTION_SOURCE_TASK from sentry_sdk.utils import ( + _serialize_span_attribute, capture_internal_exceptions, ensure_integration_enabled, event_from_exception, @@ -35,6 +36,15 @@ DEFAULT_TRANSACTION_NAME = "unknown RQ task" +JOB_PROPERTY_TO_ATTRIBUTE = { + "id": "messaging.message.id", +} + +QUEUE_PROPERTY_TO_ATTRIBUTE = { + "name": "messaging.destination.name", +} + + class RqIntegration(Integration): identifier = "rq" origin = f"auto.queue.{identifier}" @@ -54,8 +64,8 @@ def setup_once(): old_perform_job = Worker.perform_job @ensure_integration_enabled(RqIntegration, old_perform_job) - def sentry_patched_perform_job(self, job, *args, **kwargs): - # type: (Any, Job, *Queue, **Any) -> bool + def sentry_patched_perform_job(self, job, queue, *args, **kwargs): + # type: (Any, Job, Queue, *Any, **Any) -> bool with sentry_sdk.new_scope() as scope: try: transaction_name = job.func_name or DEFAULT_TRANSACTION_NAME @@ -76,9 +86,9 @@ def sentry_patched_perform_job(self, job, *args, **kwargs): name=transaction_name, source=TRANSACTION_SOURCE_TASK, origin=RqIntegration.origin, - custom_sampling_context={"rq_job": job}, + attributes=_prepopulate_attributes(job, queue), ): - rv = old_perform_job(self, job, *args, **kwargs) + rv = old_perform_job(self, job, queue, *args, **kwargs) if self.is_horse: # We're inside of a forked process and RQ is @@ -167,3 +177,30 @@ def _capture_exception(exc_info, **kwargs): ) sentry_sdk.capture_event(event, hint=hint) + + +def _prepopulate_attributes(job, queue): + # type: (Job, Queue) -> dict[str, Any] + attributes = { + "messaging.system": "rq", + } + + for prop, attr in JOB_PROPERTY_TO_ATTRIBUTE.items(): + if getattr(job, prop, None) is not None: + attributes[attr] = getattr(job, prop) + + for prop, attr in QUEUE_PROPERTY_TO_ATTRIBUTE.items(): + if getattr(queue, prop, None) is not None: + attributes[attr] = getattr(queue, prop) + + for key in ("args", "kwargs"): + if getattr(job, key, None): + attributes[f"rq.job.{key}"] = _serialize_span_attribute(getattr(job, key)) + + func = job.func + if callable(func): + func = func.__name__ + + attributes["rq.job.func"] = _serialize_span_attribute(func) + + return attributes diff --git a/tests/integrations/rq/test_rq.py b/tests/integrations/rq/test_rq.py index dba072766d..fbe5a521d3 100644 --- a/tests/integrations/rq/test_rq.py +++ b/tests/integrations/rq/test_rq.py @@ -118,7 +118,9 @@ def test_transaction_with_error( ) assert envelope["type"] == "transaction" - assert envelope["contexts"]["trace"] == error_event["contexts"]["trace"] + assert envelope["contexts"]["trace"] == DictionaryContaining( + error_event["contexts"]["trace"] + ) assert envelope["transaction"] == error_event["transaction"] assert envelope["extra"]["rq-job"] == DictionaryContaining( { @@ -148,10 +150,7 @@ def test_error_has_trace_context_if_tracing_disabled( assert error_event["contexts"]["trace"] -def test_tracing_enabled( - sentry_init, - capture_events, -): +def test_tracing_enabled(sentry_init, capture_events, DictionaryContaining): sentry_init(integrations=[RqIntegration()], traces_sample_rate=1.0) events = capture_events() @@ -165,7 +164,10 @@ def test_tracing_enabled( assert error_event["transaction"] == "tests.integrations.rq.test_rq.crashing_job" assert transaction["transaction"] == "tests.integrations.rq.test_rq.crashing_job" - assert transaction["contexts"]["trace"] == error_event["contexts"]["trace"] + assert ( + 
DictionaryContaining(error_event["contexts"]["trace"]) + == transaction["contexts"]["trace"] + ) def test_tracing_disabled( @@ -218,9 +220,7 @@ def test_transaction_no_error( ) -def test_traces_sampler_gets_correct_values_in_sampling_context( - sentry_init, DictionaryContaining, ObjectDescribedBy # noqa:N803 -): +def test_traces_sampler_gets_correct_values_in_sampling_context(sentry_init): traces_sampler = mock.Mock(return_value=True) sentry_init(integrations=[RqIntegration()], traces_sampler=traces_sampler) @@ -230,22 +230,13 @@ def test_traces_sampler_gets_correct_values_in_sampling_context( queue.enqueue(do_trick, "Bodhi", trick="roll over") worker.work(burst=True) - traces_sampler.assert_any_call( - DictionaryContaining( - { - "rq_job": ObjectDescribedBy( - type=rq.job.Job, - attrs={ - "description": "tests.integrations.rq.test_rq.do_trick('Bodhi', trick='roll over')", - "result": "Bodhi, can you roll over? Good dog!", - "func_name": "tests.integrations.rq.test_rq.do_trick", - "args": ("Bodhi",), - "kwargs": {"trick": "roll over"}, - }, - ), - } - ) - ) + sampling_context = traces_sampler.call_args_list[0][0][0] + assert sampling_context["messaging.system"] == "rq" + assert sampling_context["rq.job.args"] == ["Bodhi"] + assert sampling_context["rq.job.kwargs"] == '{"trick": "roll over"}' + assert sampling_context["rq.job.func"] == "do_trick" + assert sampling_context["messaging.message.id"] + assert sampling_context["messaging.destination.name"] == "default" @pytest.mark.skipif( From ad96b2495a5dbb50b836a6d9897eea9850975c1a Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Thu, 21 Nov 2024 17:00:15 +0100 Subject: [PATCH 121/244] Fix httpx tests (#3810) --- tests/integrations/httpx/test_httpx.py | 30 +++++++++++++------------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/tests/integrations/httpx/test_httpx.py b/tests/integrations/httpx/test_httpx.py index f31a665245..440171e8c4 100644 --- a/tests/integrations/httpx/test_httpx.py +++ b/tests/integrations/httpx/test_httpx.py @@ -6,7 +6,7 @@ import responses import sentry_sdk -from sentry_sdk import capture_message, start_transaction +from sentry_sdk import capture_message, start_span from sentry_sdk.consts import MATCH_ALL, SPANDATA from sentry_sdk.integrations.httpx import HttpxIntegration from tests.conftest import ApproxDict @@ -26,7 +26,7 @@ def before_breadcrumb(crumb, hint): url = "http://example.com/" responses.add(responses.GET, url, status=200) - with start_transaction(): + with start_span(): events = capture_events() if asyncio.iscoroutinefunction(httpx_client.get): @@ -72,11 +72,10 @@ def test_outgoing_trace_headers(sentry_init, httpx_client, capture_envelopes): url = "http://example.com/" responses.add(responses.GET, url, status=200) - with start_transaction( + with start_span( name="/interactions/other-dogs/new-dog", op="greeting.sniff", - trace_id="01234567890123456789012345678901", - ) as transaction: + ) as span: if asyncio.iscoroutinefunction(httpx_client.get): response = asyncio.get_event_loop().run_until_complete( httpx_client.get(url) @@ -102,7 +101,7 @@ def test_outgoing_trace_headers(sentry_init, httpx_client, capture_envelopes): (httpx.Client(), httpx.AsyncClient()), ) def test_outgoing_trace_headers_append_to_baggage( - sentry_init, httpx_client, capture_envelopes + sentry_init, httpx_client, capture_envelopes, SortedBaggage, # noqa: N803 ): sentry_init( traces_sample_rate=1.0, @@ -115,11 +114,10 @@ def test_outgoing_trace_headers_append_to_baggage( url = "http://example.com/" 
responses.add(responses.GET, url, status=200) - with start_transaction( + with start_span( name="/interactions/other-dogs/new-dog", op="greeting.sniff", - trace_id="01234567890123456789012345678901", - ) as transaction: + ): if asyncio.iscoroutinefunction(httpx_client.get): response = asyncio.get_event_loop().run_until_complete( httpx_client.get(url, headers={"baGGage": "custom=data"}) @@ -130,17 +128,18 @@ def test_outgoing_trace_headers_append_to_baggage( (envelope,) = envelopes transaction = envelope.get_transaction_event() request_span = transaction["spans"][-1] + trace_id = transaction["contexts"]["trace"]["trace_id"] assert response.request.headers[ "sentry-trace" ] == "{trace_id}-{parent_span_id}-{sampled}".format( - trace_id=transaction["contexts"]["trace"]["trace_id"], + trace_id=trace_id, parent_span_id=request_span["span_id"], sampled=1, ) assert ( response.request.headers["baggage"] - == "custom=data,sentry-trace_id=01234567890123456789012345678901,sentry-environment=production,sentry-release=d08ebdb9309e1b004c6f52202de58a09c2268e42,sentry-transaction=/interactions/other-dogs/new-dog,sentry-sample_rate=1.0,sentry-sampled=true" + == SortedBaggage(f"custom=data,sentry-trace_id={trace_id},sentry-environment=production,sentry-release=d08ebdb9309e1b004c6f52202de58a09c2268e42,sentry-transaction=/interactions/other-dogs/new-dog,sentry-sample_rate=1.0,sentry-sampled=true") ) @@ -274,7 +273,7 @@ def test_option_trace_propagation_targets( integrations=[HttpxIntegration()], ) - with sentry_sdk.start_transaction(): # Must be in a transaction to propagate headers + with sentry_sdk.start_span(): # Must be in a root span to propagate headers if asyncio.iscoroutinefunction(httpx_client.get): asyncio.get_event_loop().run_until_complete(httpx_client.get(url)) else: @@ -288,7 +287,7 @@ def test_option_trace_propagation_targets( assert "sentry-trace" not in request_headers -def test_do_not_propagate_outside_transaction(sentry_init, httpx_mock): +def test_propagates_twp_outside_root_span(sentry_init, httpx_mock): httpx_mock.add_response() sentry_init( @@ -301,7 +300,8 @@ def test_do_not_propagate_outside_transaction(sentry_init, httpx_mock): httpx_client.get("http://example.com/") request_headers = httpx_mock.get_request().headers - assert "sentry-trace" not in request_headers + assert "sentry-trace" in request_headers + assert request_headers["sentry-trace"] == sentry_sdk.get_traceparent() @pytest.mark.tests_internal_exceptions @@ -352,7 +352,7 @@ def test_span_origin(sentry_init, capture_events, httpx_client): url = "http://example.com/" responses.add(responses.GET, url, status=200) - with start_transaction(name="test_transaction"): + with start_span(name="test_root_span"): if asyncio.iscoroutinefunction(httpx_client.get): asyncio.get_event_loop().run_until_complete(httpx_client.get(url)) else: From 7cfe29a1e64cc30fcbfe0f76b392328df4cd5b69 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Thu, 21 Nov 2024 17:04:16 +0100 Subject: [PATCH 122/244] Fix asyncio tests (#3811) --- tests/integrations/asyncio/test_asyncio.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/tests/integrations/asyncio/test_asyncio.py b/tests/integrations/asyncio/test_asyncio.py index c9e572ca73..f122f1b804 100644 --- a/tests/integrations/asyncio/test_asyncio.py +++ b/tests/integrations/asyncio/test_asyncio.py @@ -74,7 +74,7 @@ async def test_create_task( events = capture_events() - with sentry_sdk.start_transaction(name="test_transaction_for_create_task"): + with 
sentry_sdk.start_span(name="test_transaction_for_create_task"): with sentry_sdk.start_span(op="root", name="not so important"): tasks = [event_loop.create_task(foo()), event_loop.create_task(bar())] await asyncio.wait(tasks, return_when=asyncio.FIRST_EXCEPTION) @@ -117,7 +117,7 @@ async def test_gather( events = capture_events() - with sentry_sdk.start_transaction(name="test_transaction_for_gather"): + with sentry_sdk.start_span(name="test_transaction_for_gather"): with sentry_sdk.start_span(op="root", name="not so important"): await asyncio.gather(foo(), bar(), return_exceptions=True) @@ -160,7 +160,8 @@ async def test_exception( events = capture_events() - with sentry_sdk.start_transaction(name="test_exception"): + with sentry_sdk.start_span(name="test_exception"): + sentry_sdk.get_isolation_scope().set_transaction_name("test_exception") with sentry_sdk.start_span(op="root", name="not so important"): tasks = [event_loop.create_task(boom()), event_loop.create_task(bar())] await asyncio.wait(tasks, return_when=asyncio.FIRST_EXCEPTION) @@ -375,7 +376,7 @@ async def test_span_origin( events = capture_events() - with sentry_sdk.start_transaction(name="something"): + with sentry_sdk.start_span(name="something"): tasks = [ event_loop.create_task(foo()), ] From a9b4d473c1faf8d07749a7b5f220477dc3117551 Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Fri, 22 Nov 2024 10:39:58 +0100 Subject: [PATCH 123/244] Turn custom_sampling_context into span attrs in Celery integration (#3813) --- MIGRATION_GUIDE.md | 14 ++++++++++++-- sentry_sdk/integrations/celery/__init__.py | 22 +++++++++++++--------- tests/integrations/celery/test_celery.py | 14 +++++++++----- 3 files changed, 34 insertions(+), 16 deletions(-) diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index 215dd4e5a1..88a51e8608 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -21,6 +21,16 @@ Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of wh - clickhouse-driver integration: The query is now available under the `db.query.text` span attribute (only if `send_default_pii` is `True`). - `sentry_sdk.init` now returns `None` instead of a context manager. - The `sampling_context` argument of `traces_sampler` now additionally contains all span attributes known at span start. +- If you're using the Celery integration, the `sampling_context` argument of `traces_sampler` doesn't contain the `celery_job` dictionary anymore. Instead, the individual keys are now available as: + + | Dictionary keys | Sampling context key | + | ---------------------- | -------------------- | + | `celery_job["args"]` | `celery.job.args` | + | `celery_job["kwargs"]` | `celery.job.kwargs` | + | `celery_job["task"]` | `celery.job.task` | + + Note that all of these are serialized, i.e., not the original `args` and `kwargs` but rather OpenTelemetry-friendly span attributes. + - If you're using the AIOHTTP integration, the `sampling_context` argument of `traces_sampler` doesn't contain the `aiohttp_request` object anymore. Instead, some of the individual properties of the request are accessible, if available, as follows: | Request property | Sampling context key(s) | @@ -71,7 +81,7 @@ Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of wh | `client` | `client.address`, `client.port` | | full URL | `url.full` | -- If you're using the RQ integration, the `sampling_context` argument of `traces_sampler` doesn't contain the `rq_job` object anymore. 
Instead, the individual properties of the scope, if available, are accessible as follows: +- If you're using the RQ integration, the `sampling_context` argument of `traces_sampler` doesn't contain the `rq_job` object anymore. Instead, the individual properties of the job and the queue, if available, are accessible as follows: | RQ property | Sampling context key(s) | | --------------- | ---------------------------- | @@ -79,7 +89,7 @@ Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of wh | `rq_job.kwargs` | `rq.job.kwargs` | | `rq_job.func` | `rq.job.func` | | `queue.name` | `messaging.destination.name` | - | `job.id` | `messaging.message.id` | + | `rq_job.id` | `messaging.message.id` | Note that `rq.job.args`, `rq.job.kwargs`, and `rq.job.func` are serialized and not the actual objects on the job. diff --git a/sentry_sdk/integrations/celery/__init__.py b/sentry_sdk/integrations/celery/__init__.py index e2ee4de532..0b66bbf05c 100644 --- a/sentry_sdk/integrations/celery/__init__.py +++ b/sentry_sdk/integrations/celery/__init__.py @@ -20,6 +20,7 @@ ensure_integration_enabled, event_from_exception, reraise, + _serialize_span_attribute, ) from typing import TYPE_CHECKING @@ -318,15 +319,9 @@ def _inner(*args, **kwargs): name=task.name, source=TRANSACTION_SOURCE_TASK, origin=CeleryIntegration.origin, - custom_sampling_context={ - "celery_job": { - "task": task.name, - # for some reason, args[1] is a list if non-empty but a - # tuple if empty - "args": list(args[1]), - "kwargs": args[2], - } - }, + # for some reason, args[1] is a list if non-empty but a + # tuple if empty + attributes=_prepopulate_attributes(task, list(args[1]), args[2]), ) as transaction: transaction.set_status(SPANSTATUS.OK) return f(*args, **kwargs) @@ -516,3 +511,12 @@ def sentry_publish(self, *args, **kwargs): return original_publish(self, *args, **kwargs) Producer.publish = sentry_publish + + +def _prepopulate_attributes(task, args, kwargs): + attributes = { + "celery.job.task": task.name, + "celery.job.args": _serialize_span_attribute(args), + "celery.job.kwargs": _serialize_span_attribute(kwargs), + } + return attributes diff --git a/tests/integrations/celery/test_celery.py b/tests/integrations/celery/test_celery.py index e51341599f..119e0d0e39 100644 --- a/tests/integrations/celery/test_celery.py +++ b/tests/integrations/celery/test_celery.py @@ -13,6 +13,7 @@ _wrap_task_run, ) from sentry_sdk.integrations.celery.beat import _get_headers +from sentry_sdk.utils import _serialize_span_attribute from tests.conftest import ApproxDict @@ -430,7 +431,7 @@ def dummy_task(self, x, y): def test_traces_sampler_gets_task_info_in_sampling_context( - init_celery, celery_invocation, DictionaryContaining # noqa:N803 + init_celery, celery_invocation ): traces_sampler = mock.Mock() celery = init_celery(traces_sampler=traces_sampler) @@ -445,10 +446,13 @@ def walk_dogs(x, y): walk_dogs, [["Maisey", "Charlie", "Bodhi", "Cory"], "Dog park round trip"], 1 ) - traces_sampler.assert_any_call( - # depending on the iteration of celery_invocation, the data might be - # passed as args or as kwargs, so make this generic - DictionaryContaining({"celery_job": dict(task="dog_walk", **args_kwargs)}) + sampling_context = traces_sampler.call_args_list[1][0][0] + assert sampling_context["celery.job.task"] == "dog_walk" + assert sampling_context["celery.job.args"] == _serialize_span_attribute( + args_kwargs["args"] + ) + assert sampling_context["celery.job.kwargs"] == _serialize_span_attribute( + args_kwargs["kwargs"] ) From 
5e822de315dc83abc5d391972490bf0df349a1f0 Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Fri, 22 Nov 2024 13:34:24 +0100 Subject: [PATCH 124/244] Span attrs instead of AWS custom sampling context (#3814) --- MIGRATION_GUIDE.md | 12 +++++++ sentry_sdk/integrations/aws_lambda.py | 42 ++++++++++++++++++++--- tests/integrations/aws_lambda/test_aws.py | 20 ++++------- 3 files changed, 57 insertions(+), 17 deletions(-) diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index 88a51e8608..558188ae1b 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -93,6 +93,18 @@ Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of wh Note that `rq.job.args`, `rq.job.kwargs`, and `rq.job.func` are serialized and not the actual objects on the job. +- If you're using the AWS Lambda integration, the `sampling_context` argument of `traces_sampler` doesn't contain the `aws_event` and `aws_context` objects anymore. Instead, the following, if available, is accessible: + + | AWS property | Sampling context key(s) | + | ------------------------------------------- | ----------------------- | + | `aws_event["httpMethod"]` | `http.request.method` | + | `aws_event["queryStringParameters"]` | `url.query` | + | `aws_event["path"]` | `url.path` | + | full URL | `url.full` | + | `aws_event["headers"]["X-Forwarded-Proto"]` | `network.protocol.name` | + | `aws_event["headers"]["Host"]` | `server.address` | + | `aws_context["function_name"]` | `faas.name` | + ### Removed - Spans no longer have a `description`. Use `name` instead. diff --git a/sentry_sdk/integrations/aws_lambda.py b/sentry_sdk/integrations/aws_lambda.py index 8579fcb6c5..656d71ec8e 100644 --- a/sentry_sdk/integrations/aws_lambda.py +++ b/sentry_sdk/integrations/aws_lambda.py @@ -39,6 +39,17 @@ MILLIS_TO_SECONDS = 1000.0 +EVENT_TO_ATTRIBUTES = { + "httpMethod": "http.request.method", + "queryStringParameters": "url.query", + "path": "url.path", +} + +CONTEXT_TO_ATTRIBUTES = { + "function_name": "faas.name", +} + + def _wrap_init_error(init_error): # type: (F) -> F @ensure_integration_enabled(AwsLambdaIntegration, init_error) @@ -151,10 +162,7 @@ def sentry_handler(aws_event, aws_context, *args, **kwargs): name=aws_context.function_name, source=TRANSACTION_SOURCE_COMPONENT, origin=AwsLambdaIntegration.origin, - custom_sampling_context={ - "aws_event": aws_event, - "aws_context": aws_context, - }, + attributes=_prepopulate_attributes(aws_event, aws_context), ): try: return handler(aws_event, aws_context, *args, **kwargs) @@ -457,3 +465,29 @@ def _event_from_error_json(error_json): } # type: Event return event + + +def _prepopulate_attributes(aws_event, aws_context): + attributes = {} + + for prop, attr in EVENT_TO_ATTRIBUTES.items(): + if aws_event.get(prop) is not None: + attributes[attr] = aws_event[prop] + + for prop, attr in CONTEXT_TO_ATTRIBUTES.items(): + if getattr(aws_context, prop, None) is not None: + attributes[attr] = getattr(aws_context, prop) + + url = _get_url(aws_event, aws_context) + if url: + if aws_event.get("queryStringParameters"): + url += f"?{aws_event['queryStringParameters']}" + attributes["url.full"] = url + + headers = aws_event.get("headers") or {} + if headers.get("X-Forwarded-Proto"): + attributes["network.protocol.name"] = headers["X-Forwarded-Proto"] + if headers.get("Host"): + attributes["server.address"] = headers["Host"] + + return attributes diff --git a/tests/integrations/aws_lambda/test_aws.py b/tests/integrations/aws_lambda/test_aws.py index e229812336..c1235ae0a0 100644 --- 
a/tests/integrations/aws_lambda/test_aws.py +++ b/tests/integrations/aws_lambda/test_aws.py @@ -619,18 +619,12 @@ def test_handler(event, context): traces_sampler.assert_any_call( DictionaryContaining( { - "aws_event": DictionaryContaining({ - "httpMethod": "GET", - "path": "/sit/stay/rollover", - "headers": {"Host": "x.io", "X-Forwarded-Proto": "http"}, - }), - "aws_context": ObjectDescribedBy( - type=get_lambda_bootstrap().LambdaContext, - attrs={ - 'function_name': StringContaining("test_"), - 'function_version': '$LATEST', - } - ) + "http.request.method": "GET", + "url.path": "/sit/stay/rollover", + "url.query": "repeat=again", + "url.full": "http://x.io/sit/stay/rollover?repeat=twice", + "network.protocol.name": "http", + "server.address": "x.io", } ) ) @@ -649,7 +643,7 @@ def test_handler(event, context): ) """ ), - b'{"httpMethod": "GET", "path": "/sit/stay/rollover", "headers": {"Host": "x.io", "X-Forwarded-Proto": "http"}}', + b'{"httpMethod": "GET", "path": "/sit/stay/rollover", "query_string": {"repeat": "again"}, "headers": {"Host": "x.io", "X-Forwarded-Proto": "http"}}', ) assert response["Payload"]["AssertionError raised"] is False From 4c079349832c8c1655c595024db55fc23422ccbf Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Fri, 22 Nov 2024 14:55:17 +0100 Subject: [PATCH 125/244] dont set none as attr value (#3816) --- sentry_sdk/tracing.py | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 56816c1328..59533106b8 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -1329,8 +1329,7 @@ def description(self, value): # type: (Optional[str]) -> None from sentry_sdk.integrations.opentelemetry.consts import SentrySpanAttribute - if value is not None: - self.set_attribute(SentrySpanAttribute.DESCRIPTION, value) + self.set_attribute(SentrySpanAttribute.DESCRIPTION, value) @property def origin(self): @@ -1344,8 +1343,7 @@ def origin(self, value): # type: (Optional[str]) -> None from sentry_sdk.integrations.opentelemetry.consts import SentrySpanAttribute - if value is not None: - self.set_attribute(SentrySpanAttribute.ORIGIN, value) + self.set_attribute(SentrySpanAttribute.ORIGIN, value) @property def containing_transaction(self): @@ -1435,8 +1433,7 @@ def op(self, value): # type: (Optional[str]) -> None from sentry_sdk.integrations.opentelemetry.consts import SentrySpanAttribute - if value is not None: - self.set_attribute(SentrySpanAttribute.OP, value) + self.set_attribute(SentrySpanAttribute.OP, value) @property def name(self): @@ -1450,8 +1447,7 @@ def name(self, value): # type: (Optional[str]) -> None from sentry_sdk.integrations.opentelemetry.consts import SentrySpanAttribute - if value is not None: - self.set_attribute(SentrySpanAttribute.NAME, value) + self.set_attribute(SentrySpanAttribute.NAME, value) @property def source(self): @@ -1573,6 +1569,11 @@ def get_attribute(self, name): def set_attribute(self, key, value): # type: (str, Any) -> None + if value is None: + # otel doesn't support None as values, preferring to not set the key + # at all instead + return + self._otel_span.set_attribute(key, _serialize_span_attribute(value)) def set_status(self, status): From 10bc3fa93687620ea9ad61bf5894b529a28189d7 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Fri, 22 Nov 2024 15:32:51 +0100 Subject: [PATCH 126/244] Fix socket (#3819) --- sentry_sdk/integrations/socket.py | 4 +++- tests/integrations/socket/test_socket.py | 4 ++-- 2 files changed, 5 insertions(+), 3 deletions(-) diff 
--git a/sentry_sdk/integrations/socket.py b/sentry_sdk/integrations/socket.py index cba448c3a3..335fd643aa 100644 --- a/sentry_sdk/integrations/socket.py +++ b/sentry_sdk/integrations/socket.py @@ -59,7 +59,9 @@ def create_connection( origin=SocketIntegration.origin, only_if_parent=True, ) as span: - span.set_data("address", address) + host, port = address + span.set_data("address.host", host) + span.set_data("address.port", port) span.set_data("timeout", timeout) span.set_data("source_address", source_address) diff --git a/tests/integrations/socket/test_socket.py b/tests/integrations/socket/test_socket.py index 389256de33..e629114b2b 100644 --- a/tests/integrations/socket/test_socket.py +++ b/tests/integrations/socket/test_socket.py @@ -42,9 +42,9 @@ def test_create_connection_trace(sentry_init, capture_events): assert connect_span["description"] == "example.com:443" assert connect_span["data"] == ApproxDict( { - "address": ["example.com", 443], + "address.host": "example.com", + "address.port": 443, "timeout": timeout, - "source_address": None, } ) From 59f84d4e9513c72eaaeffea9da03915ab685d1c5 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Thu, 28 Nov 2024 13:58:04 +0100 Subject: [PATCH 127/244] Fix rust tracing (#3817) --- sentry_sdk/integrations/rust_tracing.py | 69 ++++++---------- sentry_sdk/tracing.py | 9 +- .../rust_tracing/test_rust_tracing.py | 82 ++++++++++--------- 3 files changed, 76 insertions(+), 84 deletions(-) diff --git a/sentry_sdk/integrations/rust_tracing.py b/sentry_sdk/integrations/rust_tracing.py index ae52c850c3..d394ba5712 100644 --- a/sentry_sdk/integrations/rust_tracing.py +++ b/sentry_sdk/integrations/rust_tracing.py @@ -37,11 +37,9 @@ import sentry_sdk from sentry_sdk.integrations import Integration from sentry_sdk.scope import should_send_default_pii -from sentry_sdk.tracing import Span as SentrySpan +from sentry_sdk.tracing import POTelSpan as SentrySpan from sentry_sdk.utils import SENSITIVE_DATA_SUBSTITUTE -TraceState = Optional[Tuple[Optional[SentrySpan], SentrySpan]] - class RustTracingLevel(Enum): Trace: str = "TRACE" @@ -171,7 +169,7 @@ def _include_tracing_fields(self) -> bool: else self.include_tracing_fields ) - def on_event(self, event: str, _span_state: TraceState) -> None: + def on_event(self, event: str, _span_state: Optional[SentrySpan]) -> None: deserialized_event = json.loads(event) metadata = deserialized_event.get("metadata", {}) @@ -185,7 +183,7 @@ def on_event(self, event: str, _span_state: TraceState) -> None: elif event_type == EventTypeMapping.Event: process_event(deserialized_event) - def on_new_span(self, attrs: str, span_id: str) -> TraceState: + def on_new_span(self, attrs: str, span_id: str) -> Optional[SentrySpan]: attrs = json.loads(attrs) metadata = attrs.get("metadata", {}) @@ -205,48 +203,35 @@ def on_new_span(self, attrs: str, span_id: str) -> TraceState: else: sentry_span_name = "" - kwargs = { - "op": "function", - "name": sentry_span_name, - "origin": self.origin, - } - - scope = sentry_sdk.get_current_scope() - parent_sentry_span = scope.span - if parent_sentry_span: - sentry_span = parent_sentry_span.start_child(**kwargs) - else: - sentry_span = scope.start_span(**kwargs) + span = sentry_sdk.start_span( + op="function", + name=sentry_span_name, + origin=self.origin, + only_if_parent=True, + ) + span.__enter__() fields = metadata.get("fields", []) for field in fields: if self._include_tracing_fields(): - sentry_span.set_data(field, attrs.get(field)) - else: - sentry_span.set_data(field, SENSITIVE_DATA_SUBSTITUTE) - - 
scope.span = sentry_span - return (parent_sentry_span, sentry_span) - - def on_close(self, span_id: str, span_state: TraceState) -> None: - if span_state is None: - return - - parent_sentry_span, sentry_span = span_state - sentry_span.finish() - sentry_sdk.get_current_scope().span = parent_sentry_span - - def on_record(self, span_id: str, values: str, span_state: TraceState) -> None: - if span_state is None: - return - _parent_sentry_span, sentry_span = span_state - - deserialized_values = json.loads(values) - for key, value in deserialized_values.items(): - if self._include_tracing_fields(): - sentry_span.set_data(key, value) + span.set_data(field, attrs.get(field)) else: - sentry_span.set_data(key, SENSITIVE_DATA_SUBSTITUTE) + span.set_data(field, SENSITIVE_DATA_SUBSTITUTE) + + return span + + def on_close(self, span_id: str, span: Optional[SentrySpan]) -> None: + if span is not None: + span.__exit__(None, None, None) + + def on_record(self, span_id: str, values: str, span: Optional[SentrySpan]) -> None: + if span is not None: + deserialized_values = json.loads(values) + for key, value in deserialized_values.items(): + if self._include_tracing_fields(): + span.set_data(key, value) + else: + span.set_data(key, SENSITIVE_DATA_SUBSTITUTE) class RustTracingIntegration(Integration): diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 59533106b8..4a4f28552b 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -1,3 +1,4 @@ +import json import uuid import random import time @@ -1631,8 +1632,12 @@ def finish(self, end_timestamp=None): def to_json(self): # type: () -> dict[str, Any] - # TODO-neel-potel for sampling context - pass + """ + Only meant for testing. Not used internally anymore. + """ + if not isinstance(self._otel_span, ReadableSpan): + return {} + return json.loads(self._otel_span.to_json()) def get_trace_context(self): # type: () -> dict[str, Any] diff --git a/tests/integrations/rust_tracing/test_rust_tracing.py b/tests/integrations/rust_tracing/test_rust_tracing.py index 893fc86966..77f07649b2 100644 --- a/tests/integrations/rust_tracing/test_rust_tracing.py +++ b/tests/integrations/rust_tracing/test_rust_tracing.py @@ -11,7 +11,8 @@ RustTracingLevel, EventTypeMapping, ) -from sentry_sdk import start_transaction, capture_message +from sentry_sdk import start_span, capture_message +from tests.conftest import ApproxDict def _test_event_type_mapping(metadata: Dict[str, object]) -> EventTypeMapping: @@ -74,11 +75,11 @@ def test_on_new_span_on_close(sentry_init, capture_events): sentry_init(integrations=[integration], traces_sample_rate=1.0) events = capture_events() - with start_transaction(): + with start_span(): rust_tracing.new_span(RustTracingLevel.Info, 3) sentry_first_rust_span = sentry_sdk.get_current_span() - _, rust_first_rust_span = rust_tracing.spans[3] + rust_first_rust_span = rust_tracing.spans[3] assert sentry_first_rust_span == rust_first_rust_span @@ -102,7 +103,7 @@ def test_on_new_span_on_close(sentry_init, capture_events): data = span["data"] assert data["use_memoized"] assert data["index"] == 10 - assert data["version"] is None + assert "version" not in data def test_nested_on_new_span_on_close(sentry_init, capture_events): @@ -115,23 +116,20 @@ def test_nested_on_new_span_on_close(sentry_init, capture_events): sentry_init(integrations=[integration], traces_sample_rate=1.0) events = capture_events() - with start_transaction(): + with start_span(): original_sentry_span = sentry_sdk.get_current_span() 
rust_tracing.new_span(RustTracingLevel.Info, 3, index_arg=10) sentry_first_rust_span = sentry_sdk.get_current_span() - _, rust_first_rust_span = rust_tracing.spans[3] + rust_first_rust_span = rust_tracing.spans[3] # Use a different `index_arg` value for the inner span to help # distinguish the two at the end of the test rust_tracing.new_span(RustTracingLevel.Info, 5, index_arg=9) sentry_second_rust_span = sentry_sdk.get_current_span() - rust_parent_span, rust_second_rust_span = rust_tracing.spans[5] + rust_second_rust_span = rust_tracing.spans[5] assert rust_second_rust_span == sentry_second_rust_span - assert rust_parent_span == sentry_first_rust_span - assert rust_parent_span == rust_first_rust_span - assert rust_parent_span != rust_second_rust_span rust_tracing.close_span(5) @@ -171,12 +169,12 @@ def test_nested_on_new_span_on_close(sentry_init, capture_events): first_span_data = first_span["data"] assert first_span_data["use_memoized"] assert first_span_data["index"] == 10 - assert first_span_data["version"] is None + assert "version" not in first_span_data second_span_data = second_span["data"] assert second_span_data["use_memoized"] assert second_span_data["index"] == 9 - assert second_span_data["version"] is None + assert "version" not in second_span_data def test_on_new_span_without_transaction(sentry_init): @@ -207,7 +205,7 @@ def test_on_event_exception(sentry_init, capture_events): events = capture_events() sentry_sdk.get_isolation_scope().clear_breadcrumbs() - with start_transaction(): + with start_span(): rust_tracing.new_span(RustTracingLevel.Info, 3) # Mapped to Exception @@ -243,7 +241,7 @@ def test_on_event_breadcrumb(sentry_init, capture_events): events = capture_events() sentry_sdk.get_isolation_scope().clear_breadcrumbs() - with start_transaction(): + with start_span(): rust_tracing.new_span(RustTracingLevel.Info, 3) # Mapped to Breadcrumb @@ -274,7 +272,7 @@ def test_on_event_event(sentry_init, capture_events): events = capture_events() sentry_sdk.get_isolation_scope().clear_breadcrumbs() - with start_transaction(): + with start_span(): rust_tracing.new_span(RustTracingLevel.Info, 3) # Mapped to Event @@ -311,7 +309,7 @@ def test_on_event_ignored(sentry_init, capture_events): events = capture_events() sentry_sdk.get_isolation_scope().clear_breadcrumbs() - with start_transaction(): + with start_span(): rust_tracing.new_span(RustTracingLevel.Info, 3) # Ignored @@ -344,7 +342,7 @@ def span_filter(metadata: Dict[str, object]) -> bool: sentry_init(integrations=[integration], traces_sample_rate=1.0) events = capture_events() - with start_transaction(): + with start_span(): original_sentry_span = sentry_sdk.get_current_span() # Span is not ignored @@ -377,16 +375,16 @@ def test_record(sentry_init): ) sentry_init(integrations=[integration], traces_sample_rate=1.0) - with start_transaction(): + with start_span(): rust_tracing.new_span(RustTracingLevel.Info, 3) span_before_record = sentry_sdk.get_current_span().to_json() - assert span_before_record["data"]["version"] is None + assert "version" not in span_before_record["attributes"] rust_tracing.record(3) span_after_record = sentry_sdk.get_current_span().to_json() - assert span_after_record["data"]["version"] == "memoized" + assert span_after_record["attributes"]["version"] == "memoized" def test_record_in_ignored_span(sentry_init): @@ -403,18 +401,18 @@ def span_filter(metadata: Dict[str, object]) -> bool: ) sentry_init(integrations=[integration], traces_sample_rate=1.0) - with start_transaction(): + with start_span(): 
rust_tracing.new_span(RustTracingLevel.Info, 3) span_before_record = sentry_sdk.get_current_span().to_json() - assert span_before_record["data"]["version"] is None + assert "version" not in span_before_record["attributes"] rust_tracing.new_span(RustTracingLevel.Trace, 5) rust_tracing.record(5) # `on_record()` should not do anything to the current Sentry span if the associated Rust span was ignored span_after_record = sentry_sdk.get_current_span().to_json() - assert span_after_record["data"]["version"] is None + assert "version" not in span_after_record["attributes"] @pytest.mark.parametrize( @@ -443,33 +441,37 @@ def test_include_tracing_fields( traces_sample_rate=1.0, send_default_pii=send_default_pii, ) - with start_transaction(): + with start_span(): rust_tracing.new_span(RustTracingLevel.Info, 3) span_before_record = sentry_sdk.get_current_span().to_json() if tracing_fields_expected: - assert span_before_record["data"]["version"] is None + assert "version" not in span_before_record["attributes"] else: - assert span_before_record["data"]["version"] == "[Filtered]" + assert span_before_record["attributes"]["version"] == "[Filtered]" rust_tracing.record(3) span_after_record = sentry_sdk.get_current_span().to_json() if tracing_fields_expected: - assert span_after_record["data"] == { - "thread.id": mock.ANY, - "thread.name": mock.ANY, - "use_memoized": True, - "version": "memoized", - "index": 10, - } + assert span_after_record["attributes"] == ApproxDict( + { + "thread.id": mock.ANY, + "thread.name": mock.ANY, + "use_memoized": True, + "version": "memoized", + "index": 10, + } + ) else: - assert span_after_record["data"] == { - "thread.id": mock.ANY, - "thread.name": mock.ANY, - "use_memoized": "[Filtered]", - "version": "[Filtered]", - "index": "[Filtered]", - } + assert span_after_record["attributes"] == ApproxDict( + { + "thread.id": mock.ANY, + "thread.name": mock.ANY, + "use_memoized": "[Filtered]", + "version": "[Filtered]", + "index": "[Filtered]", + } + ) From 7c2d77002652047d609a97858d0135625c6d5037 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Thu, 28 Nov 2024 14:00:32 +0100 Subject: [PATCH 128/244] Fix httplib (#3820) --- .../integrations/opentelemetry/utils.py | 53 +++--- sentry_sdk/integrations/stdlib.py | 3 +- tests/integrations/httpx/test_httpx.py | 10 +- tests/integrations/stdlib/test_httplib.py | 157 ++++++++---------- 4 files changed, 110 insertions(+), 113 deletions(-) diff --git a/sentry_sdk/integrations/opentelemetry/utils.py b/sentry_sdk/integrations/opentelemetry/utils.py index 673b334318..1db3f65da1 100644 --- a/sentry_sdk/integrations/opentelemetry/utils.py +++ b/sentry_sdk/integrations/opentelemetry/utils.py @@ -161,36 +161,43 @@ def span_data_for_http_method(span): # type: (ReadableSpan) -> OtelExtractedSpanData span_attributes = span.attributes or {} - op = "http" + op = span_attributes.get(SentrySpanAttribute.OP) + if op is None: + op = "http" - if span.kind == SpanKind.SERVER: - op += ".server" - elif span.kind == SpanKind.CLIENT: - op += ".client" + if span.kind == SpanKind.SERVER: + op += ".server" + elif span.kind == SpanKind.CLIENT: + op += ".client" http_method = span_attributes.get(SpanAttributes.HTTP_METHOD) route = span_attributes.get(SpanAttributes.HTTP_ROUTE) target = span_attributes.get(SpanAttributes.HTTP_TARGET) peer_name = span_attributes.get(SpanAttributes.NET_PEER_NAME) - description = f"{http_method}" - - if route: - description = f"{http_method} {route}" - elif target: - description = f"{http_method} {target}" - elif peer_name: - description 
= f"{http_method} {peer_name}" - else: - url = span_attributes.get(SpanAttributes.HTTP_URL) - url = cast("Optional[str]", url) - - if url: - parsed_url = urlparse(url) - url = "{}://{}{}".format( - parsed_url.scheme, parsed_url.netloc, parsed_url.path - ) - description = f"{http_method} {url}" + # TODO-neel-potel remove description completely + description = span_attributes.get( + SentrySpanAttribute.DESCRIPTION + ) or span_attributes.get(SentrySpanAttribute.NAME) + if description is None: + description = f"{http_method}" + + if route: + description = f"{http_method} {route}" + elif target: + description = f"{http_method} {target}" + elif peer_name: + description = f"{http_method} {peer_name}" + else: + url = span_attributes.get(SpanAttributes.HTTP_URL) + url = cast("Optional[str]", url) + + if url: + parsed_url = urlparse(url) + url = "{}://{}{}".format( + parsed_url.scheme, parsed_url.netloc, parsed_url.path + ) + description = f"{http_method} {url}" status, http_status = extract_span_status(span) diff --git a/sentry_sdk/integrations/stdlib.py b/sentry_sdk/integrations/stdlib.py index 424e7b88aa..7b704593db 100644 --- a/sentry_sdk/integrations/stdlib.py +++ b/sentry_sdk/integrations/stdlib.py @@ -96,6 +96,7 @@ def putrequest(self, method, url, *args, **kwargs): origin="auto.http.stdlib.httplib", only_if_parent=True, ) + span.__enter__() data = { SPANDATA.HTTP_METHOD: method, @@ -152,7 +153,7 @@ def getresponse(self, *args, **kwargs): span.set_http_status(int(rv.status)) span.set_data("reason", rv.reason) finally: - span.finish() + span.__exit__(None, None, None) return rv diff --git a/tests/integrations/httpx/test_httpx.py b/tests/integrations/httpx/test_httpx.py index 440171e8c4..9890d1f0cc 100644 --- a/tests/integrations/httpx/test_httpx.py +++ b/tests/integrations/httpx/test_httpx.py @@ -101,7 +101,10 @@ def test_outgoing_trace_headers(sentry_init, httpx_client, capture_envelopes): (httpx.Client(), httpx.AsyncClient()), ) def test_outgoing_trace_headers_append_to_baggage( - sentry_init, httpx_client, capture_envelopes, SortedBaggage, # noqa: N803 + sentry_init, + httpx_client, + capture_envelopes, + SortedBaggage, # noqa: N803 ): sentry_init( traces_sample_rate=1.0, @@ -137,9 +140,8 @@ def test_outgoing_trace_headers_append_to_baggage( parent_span_id=request_span["span_id"], sampled=1, ) - assert ( - response.request.headers["baggage"] - == SortedBaggage(f"custom=data,sentry-trace_id={trace_id},sentry-environment=production,sentry-release=d08ebdb9309e1b004c6f52202de58a09c2268e42,sentry-transaction=/interactions/other-dogs/new-dog,sentry-sample_rate=1.0,sentry-sampled=true") + assert response.request.headers["baggage"] == SortedBaggage( + f"custom=data,sentry-trace_id={trace_id},sentry-environment=production,sentry-release=d08ebdb9309e1b004c6f52202de58a09c2268e42,sentry-transaction=/interactions/other-dogs/new-dog,sentry-sample_rate=1.0,sentry-sampled=true" ) diff --git a/tests/integrations/stdlib/test_httplib.py b/tests/integrations/stdlib/test_httplib.py index ab0e04eadc..642c707268 100644 --- a/tests/integrations/stdlib/test_httplib.py +++ b/tests/integrations/stdlib/test_httplib.py @@ -6,7 +6,7 @@ import pytest -from sentry_sdk import capture_message, start_transaction +from sentry_sdk import capture_message, start_span, continue_trace, isolation_scope from sentry_sdk.consts import MATCH_ALL, SPANDATA from sentry_sdk.tracing import Transaction from sentry_sdk.integrations.stdlib import StdlibIntegration @@ -16,6 +16,31 @@ PORT = create_mock_http_server() +@pytest.fixture +def 
capture_request_headers(monkeypatch): + """ + HTTPConnection.send is passed a string containing (among other things) + the headers on the request. Mock it so we can check the headers. + """ + + def inner(do_send=True): + request_headers = {} + old_send = HTTPConnection.send + + def patched_send(self, data): + for line in data.decode("utf-8").split("\r\n")[1:]: + if line: + key, val = line.split(": ") + request_headers[key] = val + if do_send: + old_send(self, data) + + monkeypatch.setattr(HTTPConnection, "send", patched_send) + return request_headers + + return inner + + def test_crumb_capture(sentry_init, capture_events): sentry_init(integrations=[StdlibIntegration()]) events = capture_events() @@ -79,7 +104,7 @@ def test_empty_realurl(sentry_init): """ sentry_init(dsn="") - HTTPConnection("example.com", port=443).putrequest("POST", None) + HTTPConnection("localhost", PORT).putrequest("POST", None) def test_httplib_misuse(sentry_init, capture_events, request): @@ -131,40 +156,28 @@ def test_httplib_misuse(sentry_init, capture_events, request): ) -def test_outgoing_trace_headers(sentry_init, monkeypatch, capture_envelopes): - # HTTPSConnection.send is passed a string containing (among other things) - # the headers on the request. Mock it so we can check the headers, and also - # so it doesn't try to actually talk to the internet. - mock_send = mock.Mock() - monkeypatch.setattr(HTTPSConnection, "send", mock_send) - +def test_outgoing_trace_headers( + sentry_init, capture_envelopes, capture_request_headers, SortedBaggage +): sentry_init(traces_sample_rate=1.0) - envelopes = capture_envelopes() + request_headers = capture_request_headers() headers = {} + headers["sentry-trace"] = "771a43a4192642f0b136d5159a501700-1234567890abcdef-1" headers["baggage"] = ( "other-vendor-value-1=foo;bar;baz, sentry-trace_id=771a43a4192642f0b136d5159a501700, " - "sentry-public_key=49d0f7386ad645858ae85020e393bef3, sentry-sample_rate=0.01337, " + "sentry-public_key=49d0f7386ad645858ae85020e393bef3, " + "sentry-sampled=true, sentry-sample_rate=0.01337, " "sentry-user_id=Am%C3%A9lie, other-vendor-value-2=foo;bar;" ) - transaction = Transaction.continue_from_headers(headers) - - with start_transaction( - transaction=transaction, - name="/interactions/other-dogs/new-dog", - op="greeting.sniff", - trace_id="12312012123120121231201212312012", - ) as transaction: - HTTPSConnection("www.squirrelchasers.com").request("GET", "/top-chasers") - - (request_str,) = mock_send.call_args[0] - request_headers = {} - for line in request_str.decode("utf-8").split("\r\n")[1:]: - if line: - key, val = line.split(": ") - request_headers[key] = val + with isolation_scope(): + with continue_trace(headers): + with start_span(name="/interactions/other-dogs/new-dog"): + conn = HTTPConnection("localhost", PORT) + conn.request("GET", "/top-chasers") + conn.getresponse() (envelope,) = envelopes transaction = envelope.get_transaction_event() @@ -180,38 +193,30 @@ def test_outgoing_trace_headers(sentry_init, monkeypatch, capture_envelopes): expected_outgoing_baggage = ( "sentry-trace_id=771a43a4192642f0b136d5159a501700," "sentry-public_key=49d0f7386ad645858ae85020e393bef3," + "sentry-sampled=true," "sentry-sample_rate=0.01337," "sentry-user_id=Am%C3%A9lie" ) - assert request_headers["baggage"] == expected_outgoing_baggage + assert request_headers["baggage"] == SortedBaggage(expected_outgoing_baggage) -def test_outgoing_trace_headers_head_sdk(sentry_init, monkeypatch, capture_envelopes): - # HTTPSConnection.send is passed a string containing 
(among other things) - # the headers on the request. Mock it so we can check the headers, and also - # so it doesn't try to actually talk to the internet. - mock_send = mock.Mock() - monkeypatch.setattr(HTTPSConnection, "send", mock_send) - +def test_outgoing_trace_headers_head_sdk( + sentry_init, monkeypatch, capture_request_headers, capture_envelopes, SortedBaggage +): # make sure transaction is always sampled monkeypatch.setattr(random, "random", lambda: 0.1) sentry_init(traces_sample_rate=0.5, release="foo") - envelopes = capture_envelopes() + request_headers = capture_request_headers() - transaction = Transaction.continue_from_headers({}) - - with start_transaction(transaction=transaction, name="Head SDK tx") as transaction: - HTTPSConnection("www.squirrelchasers.com").request("GET", "/top-chasers") - - (request_str,) = mock_send.call_args[0] - request_headers = {} - for line in request_str.decode("utf-8").split("\r\n")[1:]: - if line: - key, val = line.split(": ") - request_headers[key] = val + with isolation_scope(): + with continue_trace({}): + with start_span(name="Head SDK tx") as root_span: + conn = HTTPConnection("localhost", PORT) + conn.request("GET", "/top-chasers") + conn.getresponse() (envelope,) = envelopes transaction = envelope.get_transaction_event() @@ -225,14 +230,15 @@ def test_outgoing_trace_headers_head_sdk(sentry_init, monkeypatch, capture_envel assert request_headers["sentry-trace"] == expected_sentry_trace expected_outgoing_baggage = ( - "sentry-trace_id=%s," + f"sentry-trace_id={root_span.trace_id}," "sentry-environment=production," "sentry-release=foo," "sentry-sample_rate=0.5," - "sentry-sampled=%s" - ) % (transaction.trace_id, "true" if transaction.sampled else "false") + "sentry-sampled=true," + "sentry-transaction=Head%20SDK%20tx" + ) - assert request_headers["baggage"] == expected_outgoing_baggage + assert request_headers["baggage"] == SortedBaggage(expected_outgoing_baggage) @pytest.mark.parametrize( @@ -295,42 +301,23 @@ def test_outgoing_trace_headers_head_sdk(sentry_init, monkeypatch, capture_envel ], ) def test_option_trace_propagation_targets( - sentry_init, monkeypatch, trace_propagation_targets, host, path, trace_propagated + sentry_init, + capture_request_headers, + trace_propagation_targets, + host, + path, + trace_propagated, ): - # HTTPSConnection.send is passed a string containing (among other things) - # the headers on the request. Mock it so we can check the headers, and also - # so it doesn't try to actually talk to the internet. 
- mock_send = mock.Mock() - monkeypatch.setattr(HTTPSConnection, "send", mock_send) - sentry_init( trace_propagation_targets=trace_propagation_targets, traces_sample_rate=1.0, ) - headers = { - "baggage": ( - "sentry-trace_id=771a43a4192642f0b136d5159a501700, " - "sentry-public_key=49d0f7386ad645858ae85020e393bef3, sentry-sample_rate=0.01337, " - ) - } - - transaction = Transaction.continue_from_headers(headers) - - with start_transaction( - transaction=transaction, - name="/interactions/other-dogs/new-dog", - op="greeting.sniff", - trace_id="12312012123120121231201212312012", - ) as transaction: - HTTPSConnection(host).request("GET", path) + request_headers = capture_request_headers(do_send=False) - (request_str,) = mock_send.call_args[0] - request_headers = {} - for line in request_str.decode("utf-8").split("\r\n")[1:]: - if line: - key, val = line.split(": ") - request_headers[key] = val + with start_span(name="foo"): + HTTPSConnection(host).request("GET", path) + # don't invoke getresponse to avoid actual network traffic if trace_propagated: assert "sentry-trace" in request_headers @@ -344,8 +331,8 @@ def test_span_origin(sentry_init, capture_events): sentry_init(traces_sample_rate=1.0, debug=True) events = capture_events() - with start_transaction(name="foo"): - conn = HTTPSConnection("example.com") + with start_span(name="foo"): + conn = HTTPConnection("localhost", PORT) conn.request("GET", "/foo") conn.getresponse() @@ -364,9 +351,9 @@ def test_http_timeout(monkeypatch, sentry_init, capture_envelopes): envelopes = capture_envelopes() - with start_transaction(op="op", name="name"): + with start_span(op="op", name="name"): try: - conn = HTTPSConnection("www.squirrelchasers.com") + conn = HTTPConnection("localhost", PORT) conn.request("GET", "/top-chasers") conn.getresponse() except Exception: @@ -385,4 +372,4 @@ def test_http_timeout(monkeypatch, sentry_init, capture_envelopes): span = transaction["spans"][0] assert span["op"] == "http.client" - assert span["description"] == "GET https://www.squirrelchasers.com/top-chasers" + assert span["description"] == f"GET http://localhost:{PORT}/top-chasers" From 5beea991d84121734c1bbfe5d32716dc98fb9ffd Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Thu, 28 Nov 2024 14:57:22 +0100 Subject: [PATCH 129/244] Use span attrs in GCP sampling context (#3818) --- MIGRATION_GUIDE.md | 13 +++++++++ sentry_sdk/integrations/aws_lambda.py | 4 ++- sentry_sdk/integrations/gcp.py | 42 ++++++++++++++++++++------- tests/integrations/gcp/test_gcp.py | 16 ++++------ 4 files changed, 53 insertions(+), 22 deletions(-) diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index 558188ae1b..1c0fa76fb0 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -105,6 +105,19 @@ Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of wh | `aws_event["headers"]["Host"]` | `server.address` | | `aws_context["function_name"]` | `faas.name` | +- If you're using the GCP integration, the `sampling_context` argument of `traces_sampler` doesn't contain the `gcp_env` and `gcp_event` keys anymore. 
Instead, the following, if available, is accessible: + + | Old sampling context key | New sampling context key | + | --------------------------------- | -------------------------- | + | `gcp_env["function_name"]` | `faas.name` | + | `gcp_env["function_region"]` | `faas.region` | + | `gcp_env["function_project"]` | `gcp.function.project` | + | `gcp_env["function_identity"]` | `gcp.function.identity` | + | `gcp_env["function_entry_point"]` | `gcp.function.entry_point` | + | `gcp_event.method` | `http.request.method` | + | `gcp_event.query_string` | `url.query` | + + ### Removed - Spans no longer have a `description`. Use `name` instead. diff --git a/sentry_sdk/integrations/aws_lambda.py b/sentry_sdk/integrations/aws_lambda.py index 656d71ec8e..177d73a638 100644 --- a/sentry_sdk/integrations/aws_lambda.py +++ b/sentry_sdk/integrations/aws_lambda.py @@ -468,7 +468,9 @@ def _event_from_error_json(error_json): def _prepopulate_attributes(aws_event, aws_context): - attributes = {} + attributes = { + "cloud.provider": "aws", + } for prop, attr in EVENT_TO_ATTRIBUTES.items(): if aws_event.get(prop) is not None: diff --git a/sentry_sdk/integrations/gcp.py b/sentry_sdk/integrations/gcp.py index 6ca52397d8..2f17464f70 100644 --- a/sentry_sdk/integrations/gcp.py +++ b/sentry_sdk/integrations/gcp.py @@ -84,22 +84,12 @@ def sentry_func(functionhandler, gcp_event, *args, **kwargs): headers = gcp_event.headers with sentry_sdk.continue_trace(headers): - sampling_context = { - "gcp_env": { - "function_name": environ.get("FUNCTION_NAME"), - "function_entry_point": environ.get("ENTRY_POINT"), - "function_identity": environ.get("FUNCTION_IDENTITY"), - "function_region": environ.get("FUNCTION_REGION"), - "function_project": environ.get("GCP_PROJECT"), - }, - "gcp_event": gcp_event, - } with sentry_sdk.start_transaction( op=OP.FUNCTION_GCP, name=environ.get("FUNCTION_NAME", ""), source=TRANSACTION_SOURCE_COMPONENT, origin=GcpIntegration.origin, - custom_sampling_context=sampling_context, + attributes=_prepopulate_attributes(gcp_event), ): try: return func(functionhandler, gcp_event, *args, **kwargs) @@ -229,3 +219,33 @@ def _get_google_cloud_logs_url(final_time): ) return url + + +ENV_TO_ATTRIBUTE = { + "FUNCTION_NAME": "faas.name", + "ENTRY_POINT": "gcp.function.entry_point", + "FUNCTION_IDENTITY": "gcp.function.identity", + "FUNCTION_REGION": "faas.region", + "GCP_PROJECT": "gcp.function.project", +} + +EVENT_TO_ATTRIBUTE = { + "method": "http.request.method", + "query_string": "url.query", +} + + +def _prepopulate_attributes(gcp_event): + attributes = { + "cloud.provider": "gcp", + } + + for key, attr in ENV_TO_ATTRIBUTE.items(): + if environ.get(key): + attributes[attr] = environ[key] + + for key, attr in EVENT_TO_ATTRIBUTE.items(): + if getattr(gcp_event, key, None): + attributes[attr] = getattr(gcp_event, key) + + return attributes diff --git a/tests/integrations/gcp/test_gcp.py b/tests/integrations/gcp/test_gcp.py index 22d104c817..f33c1b35d7 100644 --- a/tests/integrations/gcp/test_gcp.py +++ b/tests/integrations/gcp/test_gcp.py @@ -304,16 +304,12 @@ def cloud_function(functionhandler, event): try: traces_sampler.assert_any_call( DictionaryContaining({ - "gcp_env": DictionaryContaining({ - "function_name": "chase_into_tree", - "function_region": "dogpark", - "function_project": "SquirrelChasing", - }), - "gcp_event": { - "type": "chase", - "chasers": ["Maisey", "Charlie"], - "num_squirrels": 2, - }, + "faas.name": "chase_into_tree", + "faas.region": "dogpark", + "gcp.function.identity": "func_ID", + 
"gcp.function.entry_point": "cloud_function", + "gcp.function.project": "SquirrelChasing", + "cloud.provider": "gcp", }) ) except AssertionError: From 8aad6ae296d71dd1af269892bfb7ab80dec6e83e Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Fri, 29 Nov 2024 10:23:15 +0100 Subject: [PATCH 130/244] get new wsgi streaming response to work in potel --- .github/workflows/test-integrations-dbs.yml | 4 +- CHANGELOG.md | 26 ++++ docs/conf.py | 2 +- .../templates/test_group.jinja | 2 +- sentry_sdk/client.py | 12 +- sentry_sdk/consts.py | 5 +- sentry_sdk/integrations/wsgi.py | 112 +++++++++++++----- sentry_sdk/tracing.py | 1 + sentry_sdk/tracing_utils.py | 44 +++++++ setup.py | 2 +- tests/integrations/django/test_basic.py | 46 +++---- tests/integrations/flask/test_flask.py | 22 +++- .../strawberry/test_strawberry.py | 43 +++++-- tests/integrations/wsgi/test_wsgi.py | 87 +++++++++++++- tests/test_scope.py | 18 +++ 15 files changed, 352 insertions(+), 74 deletions(-) diff --git a/.github/workflows/test-integrations-dbs.yml b/.github/workflows/test-integrations-dbs.yml index ac9be77cc2..d0f8887156 100644 --- a/.github/workflows/test-integrations-dbs.yml +++ b/.github/workflows/test-integrations-dbs.yml @@ -53,7 +53,7 @@ jobs: with: python-version: ${{ matrix.python-version }} allow-prereleases: true - - uses: getsentry/action-clickhouse-in-ci@v1 + - uses: getsentry/action-clickhouse-in-ci@v1.1 - name: Setup Test Env run: | pip install "coverage[toml]" tox @@ -138,7 +138,7 @@ jobs: with: python-version: ${{ matrix.python-version }} allow-prereleases: true - - uses: getsentry/action-clickhouse-in-ci@v1 + - uses: getsentry/action-clickhouse-in-ci@v1.1 - name: Setup Test Env run: | pip install "coverage[toml]" tox diff --git a/CHANGELOG.md b/CHANGELOG.md index c47d0e0458..dbb35eb1eb 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,31 @@ # Changelog +## 2.19.0 + +### Various fixes & improvements + +- New: introduce `rust_tracing` integration. 
See https://docs.sentry.io/platforms/python/integrations/rust_tracing/ (#3717) by @matt-codecov +- Auto enable Litestar integration (#3540) by @provinzkraut +- Deprecate `sentry_sdk.init` context manager (#3729) by @szokeasaurusrex +- feat(spotlight): Send PII to Spotlight when no DSN is set (#3804) by @BYK +- feat(spotlight): Add info logs when Sentry is enabled (#3735) by @BYK +- feat(spotlight): Inject Spotlight button on Django (#3751) by @BYK +- feat(spotlight): Auto enable cache_spans for Spotlight on DEBUG (#3791) by @BYK +- fix(logging): Handle parameter `stack_info` for the `LoggingIntegration` (#3745) by @gmcrocetti +- fix(pure-eval): Make sentry-sdk[pure-eval] installable with pip==24.0 (#3757) by @sentrivana +- fix(rust_tracing): include_tracing_fields arg to control unvetted data in rust_tracing integration (#3780) by @matt-codecov +- fix(aws) Fix aws lambda tests (by reducing event size) (#3770) by @antonpirker +- fix(arq): fix integration with Worker settings as a dict (#3742) by @saber-solooki +- fix(httpx): Prevent Sentry baggage duplication (#3728) by @szokeasaurusrex +- fix(falcon): Don't exhaust request body stream (#3768) by @szokeasaurusrex +- fix(integrations): Check `retries_left` before capturing exception (#3803) by @malkovro +- fix(openai): Use name instead of description (#3807) by @sourceful-rob +- test(gcp): Only run GCP tests when they should (#3721) by @szokeasaurusrex +- chore: Shorten CI workflow names (#3805) by @sentrivana +- chore: Test with pyspark prerelease (#3760) by @sentrivana +- build(deps): bump codecov/codecov-action from 4.6.0 to 5.0.2 (#3792) by @dependabot +- build(deps): bump actions/checkout from 4.2.1 to 4.2.2 (#3691) by @dependabot + ## 2.18.0 ### Various fixes & improvements diff --git a/docs/conf.py b/docs/conf.py index 6d33e5809a..55d5295381 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -31,7 +31,7 @@ copyright = "2019-{}, Sentry Team and Contributors".format(datetime.now().year) author = "Sentry Team and Contributors" -release = "2.18.0" +release = "2.19.0" version = ".".join(release.split(".")[:2]) # The short X.Y version. diff --git a/scripts/split-tox-gh-actions/templates/test_group.jinja b/scripts/split-tox-gh-actions/templates/test_group.jinja index 0187434790..8560e2c4e0 100644 --- a/scripts/split-tox-gh-actions/templates/test_group.jinja +++ b/scripts/split-tox-gh-actions/templates/test_group.jinja @@ -47,7 +47,7 @@ python-version: {% raw %}${{ matrix.python-version }}{% endraw %} allow-prereleases: true {% if needs_clickhouse %} - - uses: getsentry/action-clickhouse-in-ci@v1 + - uses: getsentry/action-clickhouse-in-ci@v1.1 {% endif %} {% if needs_redis %} diff --git a/sentry_sdk/client.py b/sentry_sdk/client.py index d66eb3653e..e5bca4687d 100644 --- a/sentry_sdk/client.py +++ b/sentry_sdk/client.py @@ -122,7 +122,11 @@ def _get_options(*args, **kwargs): rv["traces_sample_rate"] = 1.0 if rv["event_scrubber"] is None: - rv["event_scrubber"] = EventScrubber(send_default_pii=rv["send_default_pii"]) + rv["event_scrubber"] = EventScrubber( + send_default_pii=( + False if rv["send_default_pii"] is None else rv["send_default_pii"] + ) + ) if rv["socket_options"] and not isinstance(rv["socket_options"], list): logger.warning( @@ -402,7 +406,11 @@ def should_send_default_pii(self): Returns whether the client should send default PII (Personally Identifiable Information) data to Sentry. 
""" - return self.options.get("send_default_pii", False) + result = self.options.get("send_default_pii") + if result is None: + result = not self.options["dsn"] and self.spotlight is not None + + return result @property def dsn(self): diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index 5143abee5a..a95bcaac71 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -476,6 +476,7 @@ class OP: # This type exists to trick mypy and PyCharm into thinking `init` and `Client` # take these arguments (even though they take opaque **kwargs) class ClientConstructor: + def __init__( self, dsn=None, # type: Optional[str] @@ -493,7 +494,7 @@ def __init__( transport=None, # type: Optional[Union[sentry_sdk.transport.Transport, Type[sentry_sdk.transport.Transport], Callable[[Event], None]]] transport_queue_size=DEFAULT_QUEUE_SIZE, # type: int sample_rate=1.0, # type: float - send_default_pii=False, # type: bool + send_default_pii=None, # type: Optional[bool] http_proxy=None, # type: Optional[str] https_proxy=None, # type: Optional[str] ignore_errors=[], # type: Sequence[Union[type, str]] # noqa: B006 @@ -561,4 +562,4 @@ def _get_default_options(): del _get_default_options -VERSION = "2.18.0" +VERSION = "2.19.0" diff --git a/sentry_sdk/integrations/wsgi.py b/sentry_sdk/integrations/wsgi.py index 70324a3641..eaa52b4607 100644 --- a/sentry_sdk/integrations/wsgi.py +++ b/sentry_sdk/integrations/wsgi.py @@ -1,6 +1,7 @@ import sys from contextlib import nullcontext from functools import partial +from threading import Timer import sentry_sdk from sentry_sdk._werkzeug import get_host, _get_headers @@ -12,6 +13,7 @@ ) from sentry_sdk.sessions import track_session from sentry_sdk.tracing import Transaction, TRANSACTION_SOURCE_ROUTE +from sentry_sdk.tracing_utils import finish_running_transaction from sentry_sdk.utils import ( ContextVar, capture_internal_exceptions, @@ -44,6 +46,9 @@ def __call__(self, status, response_headers, exc_info=None): # type: ignore pass +MAX_TRANSACTION_DURATION_SECONDS = 5 * 60 + + _wsgi_middleware_applied = ContextVar("sentry_wsgi_middleware_applied") DEFAULT_TRANSACTION_NAME = "generic WSGI request" @@ -110,6 +115,7 @@ def __call__(self, environ, start_response): scope.set_transaction_name( DEFAULT_TRANSACTION_NAME, source=TRANSACTION_SOURCE_ROUTE ) + current_scope = sentry_sdk.get_current_scope() with track_session(scope, session_mode="request"): with capture_internal_exceptions(): @@ -120,11 +126,15 @@ def __call__(self, environ, start_response): environ, self.use_x_forwarded_for ) ) + method = environ.get("REQUEST_METHOD", "").upper() should_trace = method in self.http_methods_to_capture + transaction = None + with sentry_sdk.continue_trace(environ): - with ( - sentry_sdk.start_span( + timer = None + if should_trace: + transaction = sentry_sdk.start_span( op=OP.HTTP_SERVER, name=DEFAULT_TRANSACTION_NAME, source=TRANSACTION_SOURCE_ROUTE, @@ -133,9 +143,21 @@ def __call__(self, environ, start_response): environ, self.use_x_forwarded_for ), ) - if should_trace - else nullcontext() - ) as transaction: + transaction.__enter__() + current_scope = transaction.scope + + timer = Timer( + MAX_TRANSACTION_DURATION_SECONDS, + finish_running_transaction, + kwargs={ + "transaction": transaction, + "current_scope": current_scope, + "isolation_scope": scope, + "debug": "from timer!", + }, + ) + timer.start() + try: response = self.app( environ, @@ -146,12 +168,21 @@ def __call__(self, environ, start_response): ), ) except BaseException: - reraise(*_capture_exception()) + exc_info = 
sys.exc_info() + _capture_exception(exc_info) + finish_running_transaction(transaction, exc_info, timer, debug="from except block") + reraise(*exc_info) finally: _wsgi_middleware_applied.set(False) - return _ScopedResponse(scope, response) + return _ScopedResponse( + response=response, + current_scope=current_scope, + isolation_scope=scope, + timer=timer, + transaction=transaction, + ) def _sentry_start_response( # type: ignore @@ -213,13 +244,13 @@ def get_client_ip(environ): return environ.get("REMOTE_ADDR") -def _capture_exception(): - # type: () -> ExcInfo +def _capture_exception(exc_info=None): + # type: (Optional[ExcInfo]) -> ExcInfo """ Captures the current exception and sends it to Sentry. Returns the ExcInfo tuple to it can be reraised afterwards. """ - exc_info = sys.exc_info() + exc_info = exc_info or sys.exc_info() e = exc_info[1] # SystemExit(0) is the only uncaught exception that is expected behavior @@ -237,7 +268,7 @@ def _capture_exception(): class _ScopedResponse: """ - Users a separate scope for each response chunk. + Use separate scopes for each response chunk. This will make WSGI apps more tolerant against: - WSGI servers streaming responses from a different thread/from @@ -246,37 +277,56 @@ class _ScopedResponse: - WSGI servers streaming responses interleaved from the same thread """ - __slots__ = ("_response", "_scope") + __slots__ = ("_response", "_current_scope", "_isolation_scope", "_timer", "_transaction") - def __init__(self, scope, response): - # type: (sentry_sdk.scope.Scope, Iterator[bytes]) -> None - self._scope = scope + def __init__( + self, + response, # type: Iterator[bytes] + current_scope, # type: sentry_sdk.scope.Scope + isolation_scope, # type: sentry_sdk.scope.Scope + timer=None, # type: Optional[Timer] + transaction=None, # type: Optional[Transaction] + ): + # type: (...) 
-> None self._response = response + self._current_scope = current_scope + self._isolation_scope = isolation_scope + self._timer = timer + self._transaction = transaction def __iter__(self): # type: () -> Iterator[bytes] iterator = iter(self._response) - while True: - with sentry_sdk.use_isolation_scope(self._scope): - try: - chunk = next(iterator) - except StopIteration: - break - except BaseException: - reraise(*_capture_exception()) + try: + while True: + with sentry_sdk.use_isolation_scope(self._isolation_scope): + with sentry_sdk.use_scope(self._current_scope): + try: + chunk = next(iterator) + except StopIteration: + break + except BaseException: + reraise(*_capture_exception()) + + yield chunk - yield chunk + finally: + with sentry_sdk.use_isolation_scope(self._isolation_scope): + with sentry_sdk.use_scope(self._current_scope): + finish_running_transaction(transaction=self._transaction, timer=self._timer, debug="from finally in iterator") def close(self): # type: () -> None - with sentry_sdk.use_isolation_scope(self._scope): - try: - self._response.close() # type: ignore - except AttributeError: - pass - except BaseException: - reraise(*_capture_exception()) + with sentry_sdk.use_isolation_scope(self._isolation_scope): + with sentry_sdk.use_scope(self._current_scope): + try: + finish_running_transaction(transaction=self._transaction, timer=self._timer, debug="from close()") + self._response.close() # type: ignore + except AttributeError: + pass + except BaseException: + reraise(*_capture_exception()) def _make_wsgi_event_processor(environ, use_x_forwarded_for): diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 59533106b8..6c39974590 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -1316,6 +1316,7 @@ def __exit__(self, ty, value, tb): self.finish() context.detach(self._ctx_token) + del self._ctx_token @property def description(self): diff --git a/sentry_sdk/tracing_utils.py b/sentry_sdk/tracing_utils.py index d9527d082b..7afc70358f 100644 --- a/sentry_sdk/tracing_utils.py +++ b/sentry_sdk/tracing_utils.py @@ -36,6 +36,9 @@ from types import FrameType + from sentry_sdk._types import ExcInfo + from threading import Timer + SENTRY_TRACE_REGEX = re.compile( "^[ \t]*" # whitespace @@ -731,3 +734,44 @@ def get_current_span(scope=None): LOW_QUALITY_TRANSACTION_SOURCES, SENTRY_TRACE_HEADER_NAME, ) + +if TYPE_CHECKING: + from sentry_sdk.tracing import Span + + +def finish_running_transaction(transaction=None, exc_info=None, timer=None, current_scope=None, isolation_scope=None, debug=None): + # type: (Optional[sentry_sdk.Transaction], Optional[ExcInfo], Optional[Timer]) -> None + print(f"----") + print(f"\nCALLED finish_running_transaction {id(transaction)} / {transaction} / {exc_info} / {timer} / {current_scope} / {isolation_scope} / debug: {debug}") + import traceback + traceback.print_stack() + # print(f"{id(transaction)} / {sentry_sdk.get_current_scope().transaction}") + print(f"hasattr _ctx_token: {hasattr(transaction, '_ctx_token')}") + # print(f"_ctx_token: {transaction._ctx_token}") + + if timer is not None: + timer.cancel() + + if transaction is not None and hasattr(transaction, "_ctx_token"): + with ( + sentry_sdk.use_isolation_scope(isolation_scope) + if isolation_scope is not None + else contextlib.nullcontext() + ) as scope1: + with ( + sentry_sdk.use_scope(current_scope) + if current_scope is not None + else contextlib.nullcontext() + ) as scope2: + if exc_info is not None: + transaction.__exit__(*exc_info) + else: + transaction.__exit__(None, None, 
None) + + print(f"End of current scope: {scope2}") + + print(f"End of isolation scope: {scope1}") + else: + print(f"Transaction has no _ctx_token") + + print("done finish_running_transaction") \ No newline at end of file diff --git a/setup.py b/setup.py index 799fc21015..db133fc75a 100644 --- a/setup.py +++ b/setup.py @@ -21,7 +21,7 @@ def get_file_text(file_name): setup( name="sentry-sdk", - version="2.18.0", + version="2.19.0", author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", diff --git a/tests/integrations/django/test_basic.py b/tests/integrations/django/test_basic.py index 64482a88da..316fa09d59 100644 --- a/tests/integrations/django/test_basic.py +++ b/tests/integrations/django/test_basic.py @@ -50,7 +50,7 @@ def test_view_exceptions(sentry_init, client, capture_exceptions, capture_events sentry_init(integrations=[DjangoIntegration()], send_default_pii=True) exceptions = capture_exceptions() events = capture_events() - client.get(reverse("view_exc")) + unpack_werkzeug_response(client.get(reverse("view_exc"))) (error,) = exceptions assert isinstance(error, ZeroDivisionError) @@ -71,7 +71,9 @@ def test_ensures_x_forwarded_header_is_honored_in_sdk_when_enabled_in_django( sentry_init(integrations=[DjangoIntegration()], send_default_pii=True) exceptions = capture_exceptions() events = capture_events() - client.get(reverse("view_exc"), headers={"X_FORWARDED_HOST": "example.com"}) + unpack_werkzeug_response( + client.get(reverse("view_exc"), headers={"X_FORWARDED_HOST": "example.com"}) + ) (error,) = exceptions assert isinstance(error, ZeroDivisionError) @@ -90,7 +92,9 @@ def test_ensures_x_forwarded_header_is_not_honored_when_unenabled_in_django( sentry_init(integrations=[DjangoIntegration()], send_default_pii=True) exceptions = capture_exceptions() events = capture_events() - client.get(reverse("view_exc"), headers={"X_FORWARDED_HOST": "example.com"}) + unpack_werkzeug_response( + client.get(reverse("view_exc"), headers={"X_FORWARDED_HOST": "example.com"}) + ) (error,) = exceptions assert isinstance(error, ZeroDivisionError) @@ -102,7 +106,7 @@ def test_ensures_x_forwarded_header_is_not_honored_when_unenabled_in_django( def test_middleware_exceptions(sentry_init, client, capture_exceptions): sentry_init(integrations=[DjangoIntegration()], send_default_pii=True) exceptions = capture_exceptions() - client.get(reverse("middleware_exc")) + unpack_werkzeug_response(client.get(reverse("middleware_exc"))) (error,) = exceptions assert isinstance(error, ZeroDivisionError) @@ -156,7 +160,7 @@ def test_has_trace_if_performance_enabled(sentry_init, client, capture_events): traces_sample_rate=1.0, ) events = capture_events() - client.head(reverse("view_exc_with_msg")) + unpack_werkzeug_response(client.head(reverse("view_exc_with_msg"))) (msg_event, error_event, transaction_event) = events @@ -212,8 +216,10 @@ def test_trace_from_headers_if_performance_enabled(sentry_init, client, capture_ trace_id = "582b43a4192642f0b136d5159a501701" sentry_trace_header = "{}-{}-{}".format(trace_id, "6e8f22c393e68f19", 1) - client.head( - reverse("view_exc_with_msg"), headers={"sentry-trace": sentry_trace_header} + unpack_werkzeug_response( + client.head( + reverse("view_exc_with_msg"), headers={"sentry-trace": sentry_trace_header} + ) ) (msg_event, error_event, transaction_event) = events @@ -930,7 +936,7 @@ def test_render_spans(sentry_init, client, capture_events, render_span_tree): for url, expected_line in views_tests: events = 
capture_events() - client.get(url) + unpack_werkzeug_response(client.get(url)) transaction = events[0] assert expected_line in render_span_tree(transaction) @@ -974,7 +980,7 @@ def test_middleware_spans(sentry_init, client, capture_events, render_span_tree) ) events = capture_events() - client.get(reverse("message")) + unpack_werkzeug_response(client.get(reverse("message"))) message, transaction = events @@ -991,7 +997,7 @@ def test_middleware_spans_disabled(sentry_init, client, capture_events): ) events = capture_events() - client.get(reverse("message")) + unpack_werkzeug_response(client.get(reverse("message"))) message, transaction = events @@ -1015,7 +1021,7 @@ def test_signals_spans(sentry_init, client, capture_events, render_span_tree): ) events = capture_events() - client.get(reverse("message")) + unpack_werkzeug_response(client.get(reverse("message"))) message, transaction = events @@ -1038,7 +1044,7 @@ def test_signals_spans_disabled(sentry_init, client, capture_events): ) events = capture_events() - client.get(reverse("message")) + unpack_werkzeug_response(client.get(reverse("message"))) message, transaction = events @@ -1068,7 +1074,7 @@ def test_signals_spans_filtering(sentry_init, client, capture_events, render_spa ) events = capture_events() - client.get(reverse("send_myapp_custom_signal")) + unpack_werkzeug_response(client.get(reverse("send_myapp_custom_signal"))) (transaction,) = events @@ -1196,7 +1202,7 @@ def test_span_origin(sentry_init, client, capture_events): ) events = capture_events() - client.get(reverse("view_with_signal")) + unpack_werkzeug_response(client.get(reverse("view_with_signal"))) (transaction,) = events @@ -1226,9 +1232,9 @@ def test_transaction_http_method_default(sentry_init, client, capture_events): ) events = capture_events() - client.get(reverse("nomessage")) - client.options(reverse("nomessage")) - client.head(reverse("nomessage")) + unpack_werkzeug_response(client.get(reverse("nomessage"))) + unpack_werkzeug_response(client.options(reverse("nomessage"))) + unpack_werkzeug_response(client.head(reverse("nomessage"))) (event,) = events @@ -1252,9 +1258,9 @@ def test_transaction_http_method_custom(sentry_init, client, capture_events): ) events = capture_events() - client.get(reverse("nomessage")) - client.options(reverse("nomessage")) - client.head(reverse("nomessage")) + unpack_werkzeug_response(client.get(reverse("nomessage"))) + unpack_werkzeug_response(client.options(reverse("nomessage"))) + unpack_werkzeug_response(client.head(reverse("nomessage"))) assert len(events) == 2 diff --git a/tests/integrations/flask/test_flask.py b/tests/integrations/flask/test_flask.py index 4e92df7e7c..ef9f8351db 100644 --- a/tests/integrations/flask/test_flask.py +++ b/tests/integrations/flask/test_flask.py @@ -394,6 +394,8 @@ def index(): client = app.test_client() response = client.post("/", data=data) assert response.status_code == 200 + # Close the response to ensure the WSGI cycle is complete and the transaction is finished + response.close() event, transaction_event = events @@ -746,6 +748,8 @@ def hi_tx(): with app.test_client() as client: response = client.get("/message_tx") assert response.status_code == 200 + # Close the response to ensure the WSGI cycle is complete and the transaction is finished + response.close() message_event, transaction_event = events @@ -940,7 +944,9 @@ def test_response_status_code_not_found_in_transaction_context( envelopes = capture_envelopes() client = app.test_client() - client.get("/not-existing-route") + response = 
client.get("/not-existing-route") + # Close the response to ensure the WSGI cycle is complete and the transaction is finished + response.close() sentry_sdk.get_client().flush() @@ -985,14 +991,21 @@ def test_transaction_http_method_default( events = capture_events() client = app.test_client() + response = client.get("/nomessage") assert response.status_code == 200 + # Close the response to ensure the WSGI cycle is complete and the transaction is finished + response.close() response = client.options("/nomessage") assert response.status_code == 200 + # Close the response to ensure the WSGI cycle is complete and the transaction is finished + response.close() response = client.head("/nomessage") assert response.status_code == 200 + # Close the response to ensure the WSGI cycle is complete and the transaction is finished + response.close() (event,) = events @@ -1022,14 +1035,21 @@ def test_transaction_http_method_custom( events = capture_events() client = app.test_client() + response = client.get("/nomessage") assert response.status_code == 200 + # Close the response to ensure the WSGI cycle is complete and the transaction is finished + response.close() response = client.options("/nomessage") assert response.status_code == 200 + # Close the response to ensure the WSGI cycle is complete and the transaction is finished + response.close() response = client.head("/nomessage") assert response.status_code == 200 + # Close the response to ensure the WSGI cycle is complete and the transaction is finished + response.close() assert len(events) == 2 diff --git a/tests/integrations/strawberry/test_strawberry.py b/tests/integrations/strawberry/test_strawberry.py index 7b40b238d2..0aab78f443 100644 --- a/tests/integrations/strawberry/test_strawberry.py +++ b/tests/integrations/strawberry/test_strawberry.py @@ -198,7 +198,10 @@ def test_capture_request_if_available_and_send_pii_is_on( client = client_factory(schema) query = "query ErrorQuery { error }" - client.post("/graphql", json={"query": query, "operationName": "ErrorQuery"}) + # Close the response to ensure the WSGI cycle is complete and the transaction is finished + client.post( + "/graphql", json={"query": query, "operationName": "ErrorQuery"} + ).close() assert len(events) == 1 @@ -253,7 +256,10 @@ def test_do_not_capture_request_if_send_pii_is_off( client = client_factory(schema) query = "query ErrorQuery { error }" - client.post("/graphql", json={"query": query, "operationName": "ErrorQuery"}) + # Close the response to ensure the WSGI cycle is complete and the transaction is finished + client.post( + "/graphql", json={"query": query, "operationName": "ErrorQuery"} + ).close() assert len(events) == 1 @@ -293,7 +299,8 @@ def test_breadcrumb_no_operation_name( client = client_factory(schema) query = "{ error }" - client.post("/graphql", json={"query": query}) + # Close the response to ensure the WSGI cycle is complete and the transaction is finished + client.post("/graphql", json={"query": query}).close() assert len(events) == 1 @@ -332,7 +339,10 @@ def test_capture_transaction_on_error( client = client_factory(schema) query = "query ErrorQuery { error }" - client.post("/graphql", json={"query": query, "operationName": "ErrorQuery"}) + # Close the response to ensure the WSGI cycle is complete and the transaction is finished + client.post( + "/graphql", json={"query": query, "operationName": "ErrorQuery"} + ).close() assert len(events) == 2 (_, transaction_event) = events @@ -409,7 +419,10 @@ def test_capture_transaction_on_success( client = 
client_factory(schema) query = "query GreetingQuery { hello }" - client.post("/graphql", json={"query": query, "operationName": "GreetingQuery"}) + # Close the response to ensure the WSGI cycle is complete and the transaction is finished + client.post( + "/graphql", json={"query": query, "operationName": "GreetingQuery"} + ).close() assert len(events) == 1 (transaction_event,) = events @@ -486,7 +499,8 @@ def test_transaction_no_operation_name( client = client_factory(schema) query = "{ hello }" - client.post("/graphql", json={"query": query}) + # Close the response to ensure the WSGI cycle is complete and the transaction is finished + client.post("/graphql", json={"query": query}).close() assert len(events) == 1 (transaction_event,) = events @@ -566,7 +580,8 @@ def test_transaction_mutation( client = client_factory(schema) query = 'mutation Change { change(attribute: "something") }' - client.post("/graphql", json={"query": query}) + # Close the response to ensure the WSGI cycle is complete and the transaction is finished + client.post("/graphql", json={"query": query}).close() assert len(events) == 1 (transaction_event,) = events @@ -641,7 +656,8 @@ def test_handle_none_query_gracefully( client_factory = request.getfixturevalue(client_factory) client = client_factory(schema) - client.post("/graphql", json={}) + # Close the response to ensure the WSGI cycle is complete and the transaction is finished + client.post("/graphql", json={}).close() assert len(events) == 0, "expected no events to be sent to Sentry" @@ -673,7 +689,8 @@ def test_span_origin( client = client_factory(schema) query = 'mutation Change { change(attribute: "something") }' - client.post("/graphql", json={"query": query}) + # Close the response to ensure the WSGI cycle is complete and the transaction is finished + client.post("/graphql", json={"query": query}).close() (event,) = events @@ -715,7 +732,10 @@ def test_span_origin2( client = client_factory(schema) query = "query GreetingQuery { hello }" - client.post("/graphql", json={"query": query, "operationName": "GreetingQuery"}) + # Close the response to ensure the WSGI cycle is complete and the transaction is finished + client.post( + "/graphql", json={"query": query, "operationName": "GreetingQuery"} + ).close() (event,) = events @@ -757,7 +777,8 @@ def test_span_origin3( client = client_factory(schema) query = "subscription { messageAdded { content } }" - client.post("/graphql", json={"query": query}) + # Close the response to ensure the WSGI cycle is complete and the transaction is finished + client.post("/graphql", json={"query": query}).close() (event,) = events diff --git a/tests/integrations/wsgi/test_wsgi.py b/tests/integrations/wsgi/test_wsgi.py index 0652a775d7..265bc12351 100644 --- a/tests/integrations/wsgi/test_wsgi.py +++ b/tests/integrations/wsgi/test_wsgi.py @@ -1,4 +1,6 @@ +import time from collections import Counter +from datetime import datetime from unittest import mock import pytest @@ -40,7 +42,7 @@ def next(self): def test_basic(sentry_init, crashing_app, capture_events): - sentry_init(send_default_pii=True) + sentry_init(send_default_pii=True, debug=True) app = SentryWsgiMiddleware(crashing_app) client = Client(app) events = capture_events() @@ -141,7 +143,7 @@ def test_transaction_with_error( def dogpark(environ, start_response): raise ValueError("Fetch aborted. 
The ball was not returned.") - sentry_init(send_default_pii=True, traces_sample_rate=1.0) + sentry_init(send_default_pii=True, traces_sample_rate=1.0, debug=True) app = SentryWsgiMiddleware(dogpark) client = Client(app) events = capture_events() @@ -491,3 +493,84 @@ def dogpark(environ, start_response): (event,) = events assert event["contexts"]["trace"]["origin"] == "auto.dogpark.deluxe" + + +def test_long_running_transaction_finished(sentry_init, capture_events): + """ + Test that a long running transaction is finished after the maximum duration, + no matter if the response is still being generated. + """ + # we allow transactions to be 0.5 seconds as a maximum + new_max_duration = 0.5 + + with mock.patch.object( + sentry_sdk.integrations.wsgi, + "MAX_TRANSACTION_DURATION_SECONDS", + new_max_duration, + ): + + def generate_content(): + # This response will take 1.5 seconds to generate + for _ in range(15): + time.sleep(0.1) + yield "ok" + + def long_running_app(environ, start_response): + start_response("200 OK", []) + return generate_content() + + sentry_init(send_default_pii=True, traces_sample_rate=1.0) + app = SentryWsgiMiddleware(long_running_app) + + events = capture_events() + + client = Client(app) + response = client.get("/") + _ = response.get_data() + + (transaction,) = events + + transaction_duration = ( + datetime.fromisoformat(transaction["timestamp"]) + - datetime.fromisoformat(transaction["start_timestamp"]) + ).total_seconds() + assert ( + transaction_duration <= new_max_duration * 1.02 + ) # we allow 2% margin for processing the request + + +def test_long_running_transaction_timer_canceled(sentry_init, capture_events): + """ + Test that the timer is canceled when the transaction is finished before the maximum duration. + """ + # we allow transactions to be 0.5 seconds as a maximum + new_max_duration = 0.5 + + with mock.patch.object( + sentry_sdk.integrations.wsgi, + "MAX_TRANSACTION_DURATION_SECONDS", + new_max_duration, + ): + with mock.patch( + "sentry_sdk.integrations.wsgi.finish_running_transaction" + ) as mock_finish: + + def generate_content(): + # This response will take 0.3 seconds to generate + for _ in range(3): + time.sleep(0.1) + yield "ok" + + def long_running_app(environ, start_response): + start_response("200 OK", []) + return generate_content() + + sentry_init(send_default_pii=True, traces_sample_rate=1.0) + app = SentryWsgiMiddleware(long_running_app) + + client = Client(app) + response = client.get("/") + _ = response.get_data() + + first_call = mock_finish.mock_calls[0].kwargs + assert first_call["timer"] is not None # if a timer is given, it will be cancelled diff --git a/tests/test_scope.py b/tests/test_scope.py index 8b89d755b8..48b8782190 100644 --- a/tests/test_scope.py +++ b/tests/test_scope.py @@ -822,6 +822,24 @@ def test_should_send_default_pii_false(sentry_init): assert should_send_default_pii() is False +def test_should_send_default_pii_default_false(sentry_init): + sentry_init() + + assert should_send_default_pii() is False + + +def test_should_send_default_pii_false_with_dsn_and_spotlight(sentry_init): + sentry_init(dsn="http://key@localhost/1", spotlight=True) + + assert should_send_default_pii() is False + + +def test_should_send_default_pii_true_without_dsn_and_spotlight(sentry_init): + sentry_init(spotlight=True) + + assert should_send_default_pii() is True + + def test_set_tags(): scope = Scope() scope.set_tags({"tag1": "value1", "tag2": "value2"}) From 59c31914b5d85c70c20d9273573482dec53ef24b Mon Sep 17 00:00:00 2001 From: Anton 
Pirker Date: Fri, 29 Nov 2024 11:15:16 +0100 Subject: [PATCH 131/244] Version without timer thread --- sentry_sdk/integrations/wsgi.py | 26 +++------------- sentry_sdk/tracing_utils.py | 41 ++++---------------------- tests/integrations/wsgi/test_wsgi.py | 44 +++------------------------- 3 files changed, 14 insertions(+), 97 deletions(-) diff --git a/sentry_sdk/integrations/wsgi.py b/sentry_sdk/integrations/wsgi.py index eaa52b4607..816c9b0a6a 100644 --- a/sentry_sdk/integrations/wsgi.py +++ b/sentry_sdk/integrations/wsgi.py @@ -1,7 +1,5 @@ import sys -from contextlib import nullcontext from functools import partial -from threading import Timer import sentry_sdk from sentry_sdk._werkzeug import get_host, _get_headers @@ -132,7 +130,6 @@ def __call__(self, environ, start_response): transaction = None with sentry_sdk.continue_trace(environ): - timer = None if should_trace: transaction = sentry_sdk.start_span( op=OP.HTTP_SERVER, @@ -146,18 +143,6 @@ def __call__(self, environ, start_response): transaction.__enter__() current_scope = transaction.scope - timer = Timer( - MAX_TRANSACTION_DURATION_SECONDS, - finish_running_transaction, - kwargs={ - "transaction": transaction, - "current_scope": current_scope, - "isolation_scope": scope, - "debug": "from timer!", - }, - ) - timer.start() - try: response = self.app( environ, @@ -170,7 +155,7 @@ def __call__(self, environ, start_response): except BaseException: exc_info = sys.exc_info() _capture_exception(exc_info) - finish_running_transaction(transaction, exc_info, timer, debug="from except block") + finish_running_transaction(transaction, exc_info) reraise(*exc_info) finally: @@ -180,7 +165,6 @@ def __call__(self, environ, start_response): response=response, current_scope=current_scope, isolation_scope=scope, - timer=timer, transaction=transaction, ) @@ -277,21 +261,19 @@ class _ScopedResponse: - WSGI servers streaming responses interleaved from the same thread """ - __slots__ = ("_response", "_current_scope", "_isolation_scope", "_timer", "_transaction") + __slots__ = ("_response", "_current_scope", "_isolation_scope", "_transaction") def __init__( self, response, # type: Iterator[bytes] current_scope, # type: sentry_sdk.scope.Scope isolation_scope, # type: sentry_sdk.scope.Scope - timer=None, # type: Optional[Timer] transaction=None, # type: Optional[Transaction] ): # type: (...) 
-> None self._response = response self._current_scope = current_scope self._isolation_scope = isolation_scope - self._timer = timer self._transaction = transaction def __iter__(self): @@ -314,14 +296,14 @@ def __iter__(self): finally: with sentry_sdk.use_isolation_scope(self._isolation_scope): with sentry_sdk.use_scope(self._current_scope): - finish_running_transaction(transaction=self._transaction, timer=self._timer, debug="from finally in iterator") + finish_running_transaction(transaction=self._transaction) def close(self): # type: () -> None with sentry_sdk.use_isolation_scope(self._isolation_scope): with sentry_sdk.use_scope(self._current_scope): try: - finish_running_transaction(transaction=self._transaction, timer=self._timer, debug="from close()") + finish_running_transaction(transaction=self._transaction) self._response.close() # type: ignore except AttributeError: pass diff --git a/sentry_sdk/tracing_utils.py b/sentry_sdk/tracing_utils.py index 7afc70358f..1f54fa8e37 100644 --- a/sentry_sdk/tracing_utils.py +++ b/sentry_sdk/tracing_utils.py @@ -739,39 +739,10 @@ def get_current_span(scope=None): from sentry_sdk.tracing import Span -def finish_running_transaction(transaction=None, exc_info=None, timer=None, current_scope=None, isolation_scope=None, debug=None): - # type: (Optional[sentry_sdk.Transaction], Optional[ExcInfo], Optional[Timer]) -> None - print(f"----") - print(f"\nCALLED finish_running_transaction {id(transaction)} / {transaction} / {exc_info} / {timer} / {current_scope} / {isolation_scope} / debug: {debug}") - import traceback - traceback.print_stack() - # print(f"{id(transaction)} / {sentry_sdk.get_current_scope().transaction}") - print(f"hasattr _ctx_token: {hasattr(transaction, '_ctx_token')}") - # print(f"_ctx_token: {transaction._ctx_token}") - - if timer is not None: - timer.cancel() - +def finish_running_transaction(transaction=None, exc_info=None): + # type: (Optional[sentry_sdk.Transaction], Optional[ExcInfo]) -> None if transaction is not None and hasattr(transaction, "_ctx_token"): - with ( - sentry_sdk.use_isolation_scope(isolation_scope) - if isolation_scope is not None - else contextlib.nullcontext() - ) as scope1: - with ( - sentry_sdk.use_scope(current_scope) - if current_scope is not None - else contextlib.nullcontext() - ) as scope2: - if exc_info is not None: - transaction.__exit__(*exc_info) - else: - transaction.__exit__(None, None, None) - - print(f"End of current scope: {scope2}") - - print(f"End of isolation scope: {scope1}") - else: - print(f"Transaction has no _ctx_token") - - print("done finish_running_transaction") \ No newline at end of file + if exc_info is not None: + transaction.__exit__(*exc_info) + else: + transaction.__exit__(None, None, None) diff --git a/tests/integrations/wsgi/test_wsgi.py b/tests/integrations/wsgi/test_wsgi.py index 265bc12351..8528d28f5c 100644 --- a/tests/integrations/wsgi/test_wsgi.py +++ b/tests/integrations/wsgi/test_wsgi.py @@ -497,7 +497,7 @@ def dogpark(environ, start_response): def test_long_running_transaction_finished(sentry_init, capture_events): """ - Test that a long running transaction is finished after the maximum duration, + Test that a long running transaction is finished after the maximum duration, no matter if the response is still being generated. 
""" # we allow transactions to be 0.5 seconds as a maximum @@ -535,42 +535,6 @@ def long_running_app(environ, start_response): - datetime.fromisoformat(transaction["start_timestamp"]) ).total_seconds() assert ( - transaction_duration <= new_max_duration * 1.02 - ) # we allow 2% margin for processing the request - - -def test_long_running_transaction_timer_canceled(sentry_init, capture_events): - """ - Test that the timer is canceled when the transaction is finished before the maximum duration. - """ - # we allow transactions to be 0.5 seconds as a maximum - new_max_duration = 0.5 - - with mock.patch.object( - sentry_sdk.integrations.wsgi, - "MAX_TRANSACTION_DURATION_SECONDS", - new_max_duration, - ): - with mock.patch( - "sentry_sdk.integrations.wsgi.finish_running_transaction" - ) as mock_finish: - - def generate_content(): - # This response will take 0.3 seconds to generate - for _ in range(3): - time.sleep(0.1) - yield "ok" - - def long_running_app(environ, start_response): - start_response("200 OK", []) - return generate_content() - - sentry_init(send_default_pii=True, traces_sample_rate=1.0) - app = SentryWsgiMiddleware(long_running_app) - - client = Client(app) - response = client.get("/") - _ = response.get_data() - - first_call = mock_finish.mock_calls[0].kwargs - assert first_call["timer"] is not None # if a timer is given, it will be cancelled + transaction_duration + <= new_max_duration * 1.02 # we allow 2% margin for processing the request + ), "Long running transaction has not been finished after a set maximum duration" From fceea4f698b350d8b111a26538317d8b75440063 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Fri, 29 Nov 2024 11:26:38 +0100 Subject: [PATCH 132/244] linting --- tests/integrations/httpx/test_httpx.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/tests/integrations/httpx/test_httpx.py b/tests/integrations/httpx/test_httpx.py index 440171e8c4..9890d1f0cc 100644 --- a/tests/integrations/httpx/test_httpx.py +++ b/tests/integrations/httpx/test_httpx.py @@ -101,7 +101,10 @@ def test_outgoing_trace_headers(sentry_init, httpx_client, capture_envelopes): (httpx.Client(), httpx.AsyncClient()), ) def test_outgoing_trace_headers_append_to_baggage( - sentry_init, httpx_client, capture_envelopes, SortedBaggage, # noqa: N803 + sentry_init, + httpx_client, + capture_envelopes, + SortedBaggage, # noqa: N803 ): sentry_init( traces_sample_rate=1.0, @@ -137,9 +140,8 @@ def test_outgoing_trace_headers_append_to_baggage( parent_span_id=request_span["span_id"], sampled=1, ) - assert ( - response.request.headers["baggage"] - == SortedBaggage(f"custom=data,sentry-trace_id={trace_id},sentry-environment=production,sentry-release=d08ebdb9309e1b004c6f52202de58a09c2268e42,sentry-transaction=/interactions/other-dogs/new-dog,sentry-sample_rate=1.0,sentry-sampled=true") + assert response.request.headers["baggage"] == SortedBaggage( + f"custom=data,sentry-trace_id={trace_id},sentry-environment=production,sentry-release=d08ebdb9309e1b004c6f52202de58a09c2268e42,sentry-transaction=/interactions/other-dogs/new-dog,sentry-sample_rate=1.0,sentry-sampled=true" ) From 19d4e07f7b3ce35b4e095dbdfec998c2e0c57074 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Mon, 2 Dec 2024 15:06:18 +0100 Subject: [PATCH 133/244] Move PotelSentrySpanProcessor to SentrySpanProcessor (#3828) * remove the older one * fix propagator `inject` to not rely on old SentrySpanProcessor span map --- .../integrations/opentelemetry/integration.py | 6 +- .../opentelemetry/potel_span_processor.py | 273 
--------- .../integrations/opentelemetry/propagator.py | 37 +- .../opentelemetry/span_processor.py | 427 +++++++------ .../opentelemetry/test_propagator.py | 206 ++----- .../opentelemetry/test_span_processor.py | 576 ------------------ 6 files changed, 262 insertions(+), 1263 deletions(-) delete mode 100644 sentry_sdk/integrations/opentelemetry/potel_span_processor.py delete mode 100644 tests/integrations/opentelemetry/test_span_processor.py diff --git a/sentry_sdk/integrations/opentelemetry/integration.py b/sentry_sdk/integrations/opentelemetry/integration.py index 016ce91b52..231bb4f32b 100644 --- a/sentry_sdk/integrations/opentelemetry/integration.py +++ b/sentry_sdk/integrations/opentelemetry/integration.py @@ -6,8 +6,8 @@ from sentry_sdk.integrations import DidNotEnable, Integration from sentry_sdk.integrations.opentelemetry.propagator import SentryPropagator -from sentry_sdk.integrations.opentelemetry.potel_span_processor import ( - PotelSentrySpanProcessor, +from sentry_sdk.integrations.opentelemetry.span_processor import ( + SentrySpanProcessor, ) from sentry_sdk.integrations.opentelemetry.contextvars_context import ( SentryContextVarsRuntimeContext, @@ -79,7 +79,7 @@ def _setup_scope_context_management(): def _setup_sentry_tracing(): # type: () -> None provider = TracerProvider(sampler=SentrySampler()) - provider.add_span_processor(PotelSentrySpanProcessor()) + provider.add_span_processor(SentrySpanProcessor()) trace.set_tracer_provider(provider) set_global_textmap(SentryPropagator()) diff --git a/sentry_sdk/integrations/opentelemetry/potel_span_processor.py b/sentry_sdk/integrations/opentelemetry/potel_span_processor.py deleted file mode 100644 index 14636b9e37..0000000000 --- a/sentry_sdk/integrations/opentelemetry/potel_span_processor.py +++ /dev/null @@ -1,273 +0,0 @@ -from collections import deque, defaultdict -from typing import cast - -from opentelemetry.trace import ( - format_trace_id, - format_span_id, - get_current_span, - INVALID_SPAN, - Span as AbstractSpan, -) -from opentelemetry.context import Context -from opentelemetry.sdk.trace import Span, ReadableSpan, SpanProcessor - -from sentry_sdk import capture_event -from sentry_sdk.consts import SPANDATA -from sentry_sdk.tracing import DEFAULT_SPAN_ORIGIN -from sentry_sdk.utils import get_current_thread_meta -from sentry_sdk.profiler.continuous_profiler import ( - try_autostart_continuous_profiler, - get_profiler_id, -) -from sentry_sdk.profiler.transaction_profiler import Profile -from sentry_sdk.integrations.opentelemetry.utils import ( - is_sentry_span, - convert_from_otel_timestamp, - extract_span_attributes, - extract_span_data, - extract_transaction_name_source, - get_trace_context, - get_profile_context, - get_sentry_meta, - set_sentry_meta, -) -from sentry_sdk.integrations.opentelemetry.consts import ( - OTEL_SENTRY_CONTEXT, - SentrySpanAttribute, -) -from sentry_sdk._types import TYPE_CHECKING - -if TYPE_CHECKING: - from typing import Optional, List, Any, Deque, DefaultDict - from sentry_sdk._types import Event - - -class PotelSentrySpanProcessor(SpanProcessor): - """ - Converts OTel spans into Sentry spans so they can be sent to the Sentry backend. 
- """ - - def __new__(cls): - # type: () -> PotelSentrySpanProcessor - if not hasattr(cls, "instance"): - cls.instance = super().__new__(cls) - - return cls.instance - - def __init__(self): - # type: () -> None - self._children_spans = defaultdict( - list - ) # type: DefaultDict[int, List[ReadableSpan]] - - def on_start(self, span, parent_context=None): - # type: (Span, Optional[Context]) -> None - if is_sentry_span(span): - return - - self._add_root_span(span, get_current_span(parent_context)) - self._start_profile(span) - - def on_end(self, span): - # type: (ReadableSpan) -> None - if is_sentry_span(span): - return - - is_root_span = not span.parent or span.parent.is_remote - if is_root_span: - # if have a root span ending, we build a transaction and send it - self._flush_root_span(span) - else: - self._children_spans[span.parent.span_id].append(span) - - # TODO-neel-potel not sure we need a clear like JS - def shutdown(self): - # type: () -> None - pass - - # TODO-neel-potel change default? this is 30 sec - # TODO-neel-potel call this in client.flush - def force_flush(self, timeout_millis=30000): - # type: (int) -> bool - return True - - def _add_root_span(self, span, parent_span): - # type: (Span, AbstractSpan) -> None - """ - This is required to make POTelSpan.root_span work - since we can't traverse back to the root purely with otel efficiently. - """ - if parent_span != INVALID_SPAN and not parent_span.get_span_context().is_remote: - # child span points to parent's root or parent - parent_root_span = get_sentry_meta(parent_span, "root_span") - set_sentry_meta(span, "root_span", parent_root_span or parent_span) - else: - # root span points to itself - set_sentry_meta(span, "root_span", span) - - def _start_profile(self, span): - # type: (Span) -> None - try_autostart_continuous_profiler() - profiler_id = get_profiler_id() - thread_id, thread_name = get_current_thread_meta() - - if profiler_id: - span.set_attribute(SPANDATA.PROFILER_ID, profiler_id) - if thread_id: - span.set_attribute(SPANDATA.THREAD_ID, str(thread_id)) - if thread_name: - span.set_attribute(SPANDATA.THREAD_NAME, thread_name) - - is_root_span = not span.parent or span.parent.is_remote - sampled = span.context and span.context.trace_flags.sampled - - if is_root_span and sampled: - # profiler uses time.perf_counter_ns() so we cannot use the - # unix timestamp that is on span.start_time - # setting it to 0 means the profiler will internally measure time on start - profile = Profile(sampled, 0) - # TODO-neel-potel sampling context?? 
- profile._set_initial_sampling_decision(sampling_context={}) - profile.__enter__() - set_sentry_meta(span, "profile", profile) - - def _flush_root_span(self, span): - # type: (ReadableSpan) -> None - transaction_event = self._root_span_to_transaction_event(span) - if not transaction_event: - return - - spans = [] - for child in self._collect_children(span): - span_json = self._span_to_json(child) - if span_json: - spans.append(span_json) - transaction_event["spans"] = spans - # TODO-neel-potel sort and cutoff max spans - - capture_event(transaction_event) - - def _collect_children(self, span): - # type: (ReadableSpan) -> List[ReadableSpan] - if not span.context: - return [] - - children = [] - bfs_queue = deque() # type: Deque[int] - bfs_queue.append(span.context.span_id) - - while bfs_queue: - parent_span_id = bfs_queue.popleft() - node_children = self._children_spans.pop(parent_span_id, []) - children.extend(node_children) - bfs_queue.extend( - [child.context.span_id for child in node_children if child.context] - ) - - return children - - # we construct the event from scratch here - # and not use the current Transaction class for easier refactoring - def _root_span_to_transaction_event(self, span): - # type: (ReadableSpan) -> Optional[Event] - if not span.context: - return None - - event = self._common_span_transaction_attributes_as_json(span) - if event is None: - return None - - transaction_name, transaction_source = extract_transaction_name_source(span) - span_data = extract_span_data(span) - trace_context = get_trace_context(span, span_data=span_data) - contexts = {"trace": trace_context} - - profile_context = get_profile_context(span) - if profile_context: - contexts["profile"] = profile_context - - (_, description, _, http_status, _) = span_data - - if http_status: - contexts["response"] = {"status_code": http_status} - - if span.resource.attributes: - contexts[OTEL_SENTRY_CONTEXT] = {"resource": dict(span.resource.attributes)} - - event.update( - { - "type": "transaction", - "transaction": transaction_name or description, - "transaction_info": {"source": transaction_source or "custom"}, - "contexts": contexts, - } - ) - - profile = cast("Optional[Profile]", get_sentry_meta(span, "profile")) - if profile: - profile.__exit__(None, None, None) - if profile.valid(): - event["profile"] = profile - set_sentry_meta(span, "profile", None) - - return event - - def _span_to_json(self, span): - # type: (ReadableSpan) -> Optional[dict[str, Any]] - if not span.context: - return None - - # This is a safe cast because dict[str, Any] is a superset of Event - span_json = cast( - "dict[str, Any]", self._common_span_transaction_attributes_as_json(span) - ) - if span_json is None: - return None - - trace_id = format_trace_id(span.context.trace_id) - span_id = format_span_id(span.context.span_id) - parent_span_id = format_span_id(span.parent.span_id) if span.parent else None - - (op, description, status, _, origin) = extract_span_data(span) - - span_json.update( - { - "trace_id": trace_id, - "span_id": span_id, - "op": op, - "description": description, - "status": status, - "origin": origin or DEFAULT_SPAN_ORIGIN, - } - ) - - if status: - span_json.setdefault("tags", {})["status"] = status - - if parent_span_id: - span_json["parent_span_id"] = parent_span_id - - if span.attributes: - span_json["data"] = dict(span.attributes) - - return span_json - - def _common_span_transaction_attributes_as_json(self, span): - # type: (ReadableSpan) -> Optional[Event] - if not span.start_time or not span.end_time: - 
return None - - common_json = { - "start_timestamp": convert_from_otel_timestamp(span.start_time), - "timestamp": convert_from_otel_timestamp(span.end_time), - } # type: Event - - measurements = extract_span_attributes(span, SentrySpanAttribute.MEASUREMENT) - if measurements: - common_json["measurements"] = measurements - - tags = extract_span_attributes(span, SentrySpanAttribute.TAG) - if tags: - common_json["tags"] = tags - - return common_json diff --git a/sentry_sdk/integrations/opentelemetry/propagator.py b/sentry_sdk/integrations/opentelemetry/propagator.py index b84d582d6e..0c6eda27a2 100644 --- a/sentry_sdk/integrations/opentelemetry/propagator.py +++ b/sentry_sdk/integrations/opentelemetry/propagator.py @@ -1,7 +1,10 @@ +from typing import cast + from opentelemetry import trace from opentelemetry.context import ( Context, get_current, + get_value, set_value, ) from opentelemetry.propagators.textmap import ( @@ -21,9 +24,7 @@ from sentry_sdk.integrations.opentelemetry.consts import ( SENTRY_BAGGAGE_KEY, SENTRY_TRACE_KEY, -) -from sentry_sdk.integrations.opentelemetry.span_processor import ( - SentrySpanProcessor, + SENTRY_SCOPES_KEY, ) from sentry_sdk.tracing import ( BAGGAGE_HEADER_NAME, @@ -35,6 +36,7 @@ if TYPE_CHECKING: from typing import Optional, Set + from sentry_sdk.integrations.opentelemetry.scope import PotelScope class SentryPropagator(TextMapPropagator): @@ -47,6 +49,7 @@ def extract(self, carrier, context=None, getter=default_getter): if context is None: context = get_current() + # TODO-neel-potel cleanup with continue_trace / isolation_scope sentry_trace = getter.get(carrier, SENTRY_TRACE_HEADER_NAME) if not sentry_trace: return context @@ -89,27 +92,15 @@ def inject(self, carrier, context=None, setter=default_setter): if context is None: context = get_current() - current_span = trace.get_current_span(context) - current_span_context = current_span.get_span_context() - - if not current_span_context.is_valid: - return - - span_id = trace.format_span_id(current_span_context.span_id) - - span_map = SentrySpanProcessor().otel_span_map - sentry_span = span_map.get(span_id, None) - if not sentry_span: - return - - setter.set(carrier, SENTRY_TRACE_HEADER_NAME, sentry_span.to_traceparent()) + scopes = get_value(SENTRY_SCOPES_KEY, context) + if scopes: + scopes = cast("tuple[PotelScope, PotelScope]", scopes) + (current_scope, _) = scopes - if sentry_span.containing_transaction: - baggage = sentry_span.containing_transaction.get_baggage() - if baggage: - baggage_data = baggage.serialize() - if baggage_data: - setter.set(carrier, BAGGAGE_HEADER_NAME, baggage_data) + # TODO-neel-potel check trace_propagation_targets + # TODO-neel-potel test propagator works with twp + for (key, value) in current_scope.iter_trace_propagation_headers(): + setter.set(carrier, key, value) @property def fields(self): diff --git a/sentry_sdk/integrations/opentelemetry/span_processor.py b/sentry_sdk/integrations/opentelemetry/span_processor.py index e33a6afca3..37f27d8cba 100644 --- a/sentry_sdk/integrations/opentelemetry/span_processor.py +++ b/sentry_sdk/integrations/opentelemetry/span_processor.py @@ -1,62 +1,45 @@ -from datetime import datetime, timezone -from time import time -from typing import TYPE_CHECKING, cast +from collections import deque, defaultdict +from typing import cast -from opentelemetry.context import get_value -from opentelemetry.sdk.trace import SpanProcessor, ReadableSpan as OTelSpan from opentelemetry.trace import ( - format_span_id, format_trace_id, + format_span_id, 
get_current_span, + INVALID_SPAN, + Span as AbstractSpan, ) -from opentelemetry.trace.span import ( - INVALID_SPAN_ID, - INVALID_TRACE_ID, -) -from sentry_sdk.integrations.opentelemetry.consts import ( - SENTRY_BAGGAGE_KEY, - SENTRY_TRACE_KEY, - OTEL_SENTRY_CONTEXT, - SPAN_ORIGIN, +from opentelemetry.context import Context +from opentelemetry.sdk.trace import Span, ReadableSpan, SpanProcessor + +import sentry_sdk +from sentry_sdk.consts import SPANDATA +from sentry_sdk.tracing import DEFAULT_SPAN_ORIGIN +from sentry_sdk.utils import get_current_thread_meta +from sentry_sdk.profiler.continuous_profiler import ( + try_autostart_continuous_profiler, + get_profiler_id, ) +from sentry_sdk.profiler.transaction_profiler import Profile from sentry_sdk.integrations.opentelemetry.utils import ( is_sentry_span, + convert_from_otel_timestamp, + extract_span_attributes, extract_span_data, + extract_transaction_name_source, + get_trace_context, + get_profile_context, + get_sentry_meta, + set_sentry_meta, ) -from sentry_sdk.scope import add_global_event_processor -from sentry_sdk.tracing import Transaction, Span as SentrySpan - +from sentry_sdk.integrations.opentelemetry.consts import ( + OTEL_SENTRY_CONTEXT, + SentrySpanAttribute, +) +from sentry_sdk._types import TYPE_CHECKING if TYPE_CHECKING: - from typing import Any, Optional, Union - from opentelemetry import context as context_api - from sentry_sdk._types import Event, Hint - -SPAN_MAX_TIME_OPEN_MINUTES = 10 - - -def link_trace_context_to_error_event(event, otel_span_map): - # type: (Event, dict[str, Union[Transaction, SentrySpan]]) -> Event - if hasattr(event, "type") and event["type"] == "transaction": - return event - - otel_span = get_current_span() - if not otel_span: - return event - - ctx = otel_span.get_span_context() - - if ctx.trace_id == INVALID_TRACE_ID or ctx.span_id == INVALID_SPAN_ID: - return event - - sentry_span = otel_span_map.get(format_span_id(ctx.span_id), None) - if not sentry_span: - return event - - contexts = event.setdefault("contexts", {}) - contexts.setdefault("trace", {}).update(sentry_span.get_trace_context()) - - return event + from typing import Optional, List, Any, Deque, DefaultDict + from sentry_sdk._types import Event class SentrySpanProcessor(SpanProcessor): @@ -64,12 +47,6 @@ class SentrySpanProcessor(SpanProcessor): Converts OTel spans into Sentry spans so they can be sent to the Sentry backend. """ - # The mapping from otel span ids to sentry spans - otel_span_map = {} # type: dict[str, Union[Transaction, SentrySpan]] - - # The currently open spans. Elements will be discarded after SPAN_MAX_TIME_OPEN_MINUTES - open_spans = {} # type: dict[int, set[str]] - def __new__(cls): # type: () -> SentrySpanProcessor if not hasattr(cls, "instance"): @@ -79,200 +56,218 @@ def __new__(cls): def __init__(self): # type: () -> None - @add_global_event_processor - def global_event_processor(event, hint): - # type: (Event, Hint) -> Event - return link_trace_context_to_error_event(event, self.otel_span_map) + self._children_spans = defaultdict( + list + ) # type: DefaultDict[int, List[ReadableSpan]] - def _prune_old_spans(self): - # type: (SentrySpanProcessor) -> None - """ - Prune spans that have been open for too long. 
- """ - current_time_minutes = int(time() / 60) - for span_start_minutes in list( - self.open_spans.keys() - ): # making a list because we change the dict - # prune empty open spans buckets - if self.open_spans[span_start_minutes] == set(): - self.open_spans.pop(span_start_minutes) - - # prune old buckets - elif current_time_minutes - span_start_minutes > SPAN_MAX_TIME_OPEN_MINUTES: - for span_id in self.open_spans.pop(span_start_minutes): - self.otel_span_map.pop(span_id, None) - - def on_start(self, otel_span, parent_context=None): - # type: (OTelSpan, Optional[context_api.Context]) -> None - from sentry_sdk import get_client, start_transaction - - client = get_client() - - if not client.dsn: + def on_start(self, span, parent_context=None): + # type: (Span, Optional[Context]) -> None + if is_sentry_span(span): return - if not otel_span.get_span_context().is_valid: - return + self._add_root_span(span, get_current_span(parent_context)) + self._start_profile(span) - if is_sentry_span(otel_span): + def on_end(self, span): + # type: (ReadableSpan) -> None + if is_sentry_span(span): return - trace_data = self._get_trace_data(otel_span, parent_context) - - parent_span_id = trace_data["parent_span_id"] - sentry_parent_span = ( - self.otel_span_map.get(parent_span_id) if parent_span_id else None - ) - - start_timestamp = None - if otel_span.start_time is not None: - start_timestamp = datetime.fromtimestamp( - otel_span.start_time / 1e9, timezone.utc - ) # OTel spans have nanosecond precision - - sentry_span = None - if sentry_parent_span: - sentry_span = sentry_parent_span.start_child( - span_id=trace_data["span_id"], - name=otel_span.name, - start_timestamp=start_timestamp, - origin=SPAN_ORIGIN, - ) + is_root_span = not span.parent or span.parent.is_remote + if is_root_span: + # if have a root span ending, we build a transaction and send it + self._flush_root_span(span) else: - sentry_span = start_transaction( - name=otel_span.name, - span_id=trace_data["span_id"], - parent_span_id=parent_span_id, - trace_id=trace_data["trace_id"], - baggage=trace_data["baggage"], - start_timestamp=start_timestamp, - origin=SPAN_ORIGIN, - ) - - self.otel_span_map[trace_data["span_id"]] = sentry_span + self._children_spans[span.parent.span_id].append(span) - if otel_span.start_time is not None: - span_start_in_minutes = int( - otel_span.start_time / 1e9 / 60 - ) # OTel spans have nanosecond precision - self.open_spans.setdefault(span_start_in_minutes, set()).add( - trace_data["span_id"] - ) - - self._prune_old_spans() + # TODO-neel-potel not sure we need a clear like JS + def shutdown(self): + # type: () -> None + pass - def on_end(self, otel_span): - # type: (OTelSpan) -> None - span_context = otel_span.get_span_context() - if not span_context.is_valid: - return + # TODO-neel-potel change default? this is 30 sec + # TODO-neel-potel call this in client.flush + def force_flush(self, timeout_millis=30000): + # type: (int) -> bool + return True - span_id = format_span_id(span_context.span_id) - sentry_span = self.otel_span_map.pop(span_id, None) - if not sentry_span: + def _add_root_span(self, span, parent_span): + # type: (Span, AbstractSpan) -> None + """ + This is required to make POTelSpan.root_span work + since we can't traverse back to the root purely with otel efficiently. 
+ """ + if parent_span != INVALID_SPAN and not parent_span.get_span_context().is_remote: + # child span points to parent's root or parent + parent_root_span = get_sentry_meta(parent_span, "root_span") + set_sentry_meta(span, "root_span", parent_root_span or parent_span) + else: + # root span points to itself + set_sentry_meta(span, "root_span", span) + + def _start_profile(self, span): + # type: (Span) -> None + try_autostart_continuous_profiler() + profiler_id = get_profiler_id() + thread_id, thread_name = get_current_thread_meta() + + if profiler_id: + span.set_attribute(SPANDATA.PROFILER_ID, profiler_id) + if thread_id: + span.set_attribute(SPANDATA.THREAD_ID, str(thread_id)) + if thread_name: + span.set_attribute(SPANDATA.THREAD_NAME, thread_name) + + is_root_span = not span.parent or span.parent.is_remote + sampled = span.context and span.context.trace_flags.sampled + + if is_root_span and sampled: + # profiler uses time.perf_counter_ns() so we cannot use the + # unix timestamp that is on span.start_time + # setting it to 0 means the profiler will internally measure time on start + profile = Profile(sampled, 0) + # TODO-neel-potel sampling context?? + profile._set_initial_sampling_decision(sampling_context={}) + profile.__enter__() + set_sentry_meta(span, "profile", profile) + + def _flush_root_span(self, span): + # type: (ReadableSpan) -> None + transaction_event = self._root_span_to_transaction_event(span) + if not transaction_event: return - sentry_span.op = otel_span.name - - if isinstance(sentry_span, Transaction): - sentry_span.name = otel_span.name - sentry_span.set_context( - OTEL_SENTRY_CONTEXT, self._get_otel_context(otel_span) + spans = [] + for child in self._collect_children(span): + span_json = self._span_to_json(child) + if span_json: + spans.append(span_json) + transaction_event["spans"] = spans + # TODO-neel-potel sort and cutoff max spans + + sentry_sdk.capture_event(transaction_event) + + def _collect_children(self, span): + # type: (ReadableSpan) -> List[ReadableSpan] + if not span.context: + return [] + + children = [] + bfs_queue = deque() # type: Deque[int] + bfs_queue.append(span.context.span_id) + + while bfs_queue: + parent_span_id = bfs_queue.popleft() + node_children = self._children_spans.pop(parent_span_id, []) + children.extend(node_children) + bfs_queue.extend( + [child.context.span_id for child in node_children if child.context] ) - self._update_transaction_with_otel_data(sentry_span, otel_span) - - else: - self._update_span_with_otel_data(sentry_span, otel_span) - end_timestamp = None - if otel_span.end_time is not None: - end_timestamp = datetime.fromtimestamp( - otel_span.end_time / 1e9, timezone.utc - ) # OTel spans have nanosecond precision + return children - sentry_span.finish(end_timestamp=end_timestamp) + # we construct the event from scratch here + # and not use the current Transaction class for easier refactoring + def _root_span_to_transaction_event(self, span): + # type: (ReadableSpan) -> Optional[Event] + if not span.context: + return None - if otel_span.start_time is not None: - span_start_in_minutes = int( - otel_span.start_time / 1e9 / 60 - ) # OTel spans have nanosecond precision - self.open_spans.setdefault(span_start_in_minutes, set()).discard(span_id) + event = self._common_span_transaction_attributes_as_json(span) + if event is None: + return None - self._prune_old_spans() + transaction_name, transaction_source = extract_transaction_name_source(span) + span_data = extract_span_data(span) + trace_context = get_trace_context(span, 
span_data=span_data) + contexts = {"trace": trace_context} - def _get_otel_context(self, otel_span): - # type: (OTelSpan) -> dict[str, Any] - """ - Returns the OTel context for Sentry. - See: https://develop.sentry.dev/sdk/performance/opentelemetry/#step-5-add-opentelemetry-context - """ - ctx = {} + profile_context = get_profile_context(span) + if profile_context: + contexts["profile"] = profile_context - if otel_span.attributes: - ctx["attributes"] = dict(otel_span.attributes) + (_, description, _, http_status, _) = span_data - if otel_span.resource.attributes: - ctx["resource"] = dict(otel_span.resource.attributes) - - return ctx + if http_status: + contexts["response"] = {"status_code": http_status} + + if span.resource.attributes: + contexts[OTEL_SENTRY_CONTEXT] = {"resource": dict(span.resource.attributes)} + + event.update( + { + "type": "transaction", + "transaction": transaction_name or description, + "transaction_info": {"source": transaction_source or "custom"}, + "contexts": contexts, + } + ) - def _get_trace_data(self, otel_span, parent_context): - # type: (OTelSpan, Optional[context_api.Context]) -> dict[str, Any] - """ - Extracts tracing information from one OTel span and its parent OTel context. - """ - trace_data = {} # type: dict[str, Any] - span_context = otel_span.get_span_context() + profile = cast("Optional[Profile]", get_sentry_meta(span, "profile")) + if profile: + profile.__exit__(None, None, None) + if profile.valid(): + event["profile"] = profile + set_sentry_meta(span, "profile", None) - span_id = format_span_id(span_context.span_id) - trace_data["span_id"] = span_id + return event - trace_id = format_trace_id(span_context.trace_id) - trace_data["trace_id"] = trace_id + def _span_to_json(self, span): + # type: (ReadableSpan) -> Optional[dict[str, Any]] + if not span.context: + return None - parent_span_id = ( - format_span_id(otel_span.parent.span_id) if otel_span.parent else None + # This is a safe cast because dict[str, Any] is a superset of Event + span_json = cast( + "dict[str, Any]", self._common_span_transaction_attributes_as_json(span) ) - trace_data["parent_span_id"] = parent_span_id - - sentry_trace_data = get_value(SENTRY_TRACE_KEY, parent_context) - sentry_trace_data = cast("dict[str, Union[str, bool, None]]", sentry_trace_data) - trace_data["parent_sampled"] = ( - sentry_trace_data["parent_sampled"] if sentry_trace_data else None + if span_json is None: + return None + + trace_id = format_trace_id(span.context.trace_id) + span_id = format_span_id(span.context.span_id) + parent_span_id = format_span_id(span.parent.span_id) if span.parent else None + + (op, description, status, _, origin) = extract_span_data(span) + + span_json.update( + { + "trace_id": trace_id, + "span_id": span_id, + "op": op, + "description": description, + "status": status, + "origin": origin or DEFAULT_SPAN_ORIGIN, + } ) - baggage = get_value(SENTRY_BAGGAGE_KEY, parent_context) - trace_data["baggage"] = baggage + if status: + span_json.setdefault("tags", {})["status"] = status - return trace_data + if parent_span_id: + span_json["parent_span_id"] = parent_span_id - def _update_span_with_otel_data(self, sentry_span, otel_span): - # type: (SentrySpan, OTelSpan) -> None - """ - Convert OTel span data and update the Sentry span with it. - This should eventually happen on the server when ingesting the spans. 
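# Illustrative, standalone sketch of the buffering strategy in the new processor
# above: finished non-root spans are parked under their parent's span_id, and only
# when the root span ends are all descendants collected breadth-first and emitted
# as one transaction. Plain dicts stand in for ReadableSpan objects; the real code
# additionally converts each span to its JSON form before sending.
from collections import defaultdict, deque

children_by_parent = defaultdict(list)

def flush_root(root):
    collected = []
    queue = deque([root["span_id"]])
    while queue:
        for child in children_by_parent.pop(queue.popleft(), []):
            collected.append(child)
            queue.append(child["span_id"])
    print("transaction %r with %d child spans" % (root["span_id"], len(collected)))

def on_end(span):
    if span["parent_id"] is None:
        flush_root(span)
    else:
        children_by_parent[span["parent_id"]].append(span)

on_end({"span_id": 2, "parent_id": 1})
on_end({"span_id": 3, "parent_id": 2})
on_end({"span_id": 1, "parent_id": None})  # prints: transaction 1 with 2 child spans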
- """ - sentry_span.set_data("otel.kind", otel_span.kind) + if span.attributes: + span_json["data"] = dict(span.attributes) - if otel_span.attributes is not None: - for key, val in otel_span.attributes.items(): - sentry_span.set_data(key, val) + return span_json - (op, description, status, http_status, _) = extract_span_data(otel_span) - sentry_span.op = op - sentry_span.description = description + def _common_span_transaction_attributes_as_json(self, span): + # type: (ReadableSpan) -> Optional[Event] + if not span.start_time or not span.end_time: + return None - if http_status: - sentry_span.set_http_status(http_status) - elif status: - sentry_span.set_status(status) + common_json = { + "start_timestamp": convert_from_otel_timestamp(span.start_time), + "timestamp": convert_from_otel_timestamp(span.end_time), + } # type: Event - def _update_transaction_with_otel_data(self, sentry_span, otel_span): - # type: (SentrySpan, OTelSpan) -> None - (op, _, status, http_status, _) = extract_span_data(otel_span) - sentry_span.op = op + measurements = extract_span_attributes(span, SentrySpanAttribute.MEASUREMENT) + if measurements: + common_json["measurements"] = measurements - if http_status: - sentry_span.set_http_status(http_status) - elif status: - sentry_span.set_status(status) + tags = extract_span_attributes(span, SentrySpanAttribute.TAG) + if tags: + common_json["tags"] = tags + + return common_json diff --git a/tests/integrations/opentelemetry/test_propagator.py b/tests/integrations/opentelemetry/test_propagator.py index d999b0bb2b..b318dccdf7 100644 --- a/tests/integrations/opentelemetry/test_propagator.py +++ b/tests/integrations/opentelemetry/test_propagator.py @@ -3,14 +3,10 @@ from unittest import mock from unittest.mock import MagicMock -from opentelemetry.context import get_current -from opentelemetry.trace import ( - SpanContext, - TraceFlags, - set_span_in_context, -) from opentelemetry.trace.propagation import get_current_span +from opentelemetry.propagators.textmap import DefaultSetter +import sentry_sdk from sentry_sdk.integrations.opentelemetry.consts import ( SENTRY_BAGGAGE_KEY, SENTRY_TRACE_KEY, @@ -123,178 +119,44 @@ def test_extract_context_sentry_trace_header_baggage(): assert span_context.trace_id == int("1234567890abcdef1234567890abcdef", 16) -@pytest.mark.forked -def test_inject_empty_otel_span_map(): - """ - Empty otel_span_map. - So there is no sentry_span to be found in inject() - and the function is returned early and no setters are called. - """ - carrier = None - context = get_current() - setter = MagicMock() - setter.set = MagicMock() - - span_context = SpanContext( - trace_id=int("1234567890abcdef1234567890abcdef", 16), - span_id=int("1234567890abcdef", 16), - trace_flags=TraceFlags(TraceFlags.SAMPLED), - is_remote=True, - ) - span = MagicMock() - span.get_span_context.return_value = span_context - - with mock.patch( - "sentry_sdk.integrations.opentelemetry.propagator.trace.get_current_span", - return_value=span, - ): - full_context = set_span_in_context(span, context) - SentryPropagator().inject(carrier, full_context, setter) - - setter.set.assert_not_called() - - -@pytest.mark.forked -def test_inject_sentry_span_no_baggage(): - """ - Inject a sentry span with no baggage. 
- """ - carrier = None - context = get_current() - setter = MagicMock() - setter.set = MagicMock() - - trace_id = "1234567890abcdef1234567890abcdef" - span_id = "1234567890abcdef" - - span_context = SpanContext( - trace_id=int(trace_id, 16), - span_id=int(span_id, 16), - trace_flags=TraceFlags(TraceFlags.SAMPLED), - is_remote=True, - ) - span = MagicMock() - span.get_span_context.return_value = span_context - - sentry_span = MagicMock() - sentry_span.to_traceparent = mock.Mock( - return_value="1234567890abcdef1234567890abcdef-1234567890abcdef-1" - ) - sentry_span.containing_transaction.get_baggage = mock.Mock(return_value=None) - - span_processor = SentrySpanProcessor() - span_processor.otel_span_map[span_id] = sentry_span - - with mock.patch( - "sentry_sdk.integrations.opentelemetry.propagator.trace.get_current_span", - return_value=span, - ): - full_context = set_span_in_context(span, context) - SentryPropagator().inject(carrier, full_context, setter) - - setter.set.assert_called_once_with( - carrier, - "sentry-trace", - "1234567890abcdef1234567890abcdef-1234567890abcdef-1", - ) +def test_inject_continue_trace(sentry_init, SortedBaggage): + sentry_init(traces_sample_rate=1.0) + carrier = {} + setter = DefaultSetter() -def test_inject_sentry_span_empty_baggage(): - """ - Inject a sentry span with no baggage. - """ - carrier = None - context = get_current() - setter = MagicMock() - setter.set = MagicMock() - - trace_id = "1234567890abcdef1234567890abcdef" - span_id = "1234567890abcdef" - - span_context = SpanContext( - trace_id=int(trace_id, 16), - span_id=int(span_id, 16), - trace_flags=TraceFlags(TraceFlags.SAMPLED), - is_remote=True, + trace_id = "771a43a4192642f0b136d5159a501700" + sentry_trace = "771a43a4192642f0b136d5159a501700-1234567890abcdef-1" + baggage = ( + "sentry-trace_id=771a43a4192642f0b136d5159a501700," + "sentry-public_key=frontendpublickey," + "sentry-sample_rate=0.01337," + "sentry-sampled=true," + "sentry-release=myfrontend," + "sentry-environment=bird," + "sentry-transaction=bar" ) - span = MagicMock() - span.get_span_context.return_value = span_context + incoming_headers = { + "HTTP_SENTRY_TRACE": sentry_trace, + "HTTP_BAGGAGE": baggage, + } - sentry_span = MagicMock() - sentry_span.to_traceparent = mock.Mock( - return_value="1234567890abcdef1234567890abcdef-1234567890abcdef-1" - ) - sentry_span.containing_transaction.get_baggage = mock.Mock(return_value=Baggage({})) - - span_processor = SentrySpanProcessor() - span_processor.otel_span_map[span_id] = sentry_span - - with mock.patch( - "sentry_sdk.integrations.opentelemetry.propagator.trace.get_current_span", - return_value=span, - ): - full_context = set_span_in_context(span, context) - SentryPropagator().inject(carrier, full_context, setter) - - setter.set.assert_called_once_with( - carrier, - "sentry-trace", - "1234567890abcdef1234567890abcdef-1234567890abcdef-1", - ) + with sentry_sdk.continue_trace(incoming_headers): + with sentry_sdk.start_span(name="foo") as span: + SentryPropagator().inject(carrier, setter=setter) + assert(carrier["sentry-trace"]) == f"{trace_id}-{span.span_id}-1" + assert(carrier["baggage"]) == SortedBaggage(baggage) -def test_inject_sentry_span_baggage(): - """ - Inject a sentry span with baggage. 
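# Illustrative sketch of the carrier shape the new inject tests above assert on:
# the propagator writes a "sentry-trace" entry of the form
# "<32-hex trace id>-<16-hex span id>-<sampled flag>" (plus a "baggage" entry,
# omitted here) into whatever mapping the setter wraps. fake_inject is a
# hypothetical stand-in, not the SDK's SentryPropagator.
def fake_inject(carrier, trace_id, span_id, sampled):
    carrier["sentry-trace"] = "%s-%s-%d" % (trace_id, span_id, int(sampled))
    return carrier

carrier = fake_inject({}, "771a43a4192642f0b136d5159a501700", "1234567890abcdef", True)
assert carrier["sentry-trace"] == "771a43a4192642f0b136d5159a501700-1234567890abcdef-1"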
- """ - carrier = None - context = get_current() - setter = MagicMock() - setter.set = MagicMock() - - trace_id = "1234567890abcdef1234567890abcdef" - span_id = "1234567890abcdef" - - span_context = SpanContext( - trace_id=int(trace_id, 16), - span_id=int(span_id, 16), - trace_flags=TraceFlags(TraceFlags.SAMPLED), - is_remote=True, - ) - span = MagicMock() - span.get_span_context.return_value = span_context +def test_inject_head_sdk(sentry_init, SortedBaggage): + sentry_init(traces_sample_rate=1.0, release="release") - sentry_span = MagicMock() - sentry_span.to_traceparent = mock.Mock( - return_value="1234567890abcdef1234567890abcdef-1234567890abcdef-1" - ) - sentry_items = { - "sentry-trace_id": "771a43a4192642f0b136d5159a501700", - "sentry-public_key": "49d0f7386ad645858ae85020e393bef3", - "sentry-sample_rate": 0.01337, - "sentry-user_id": "Amélie", - } - baggage = Baggage(sentry_items=sentry_items) - sentry_span.containing_transaction.get_baggage = MagicMock(return_value=baggage) - - span_processor = SentrySpanProcessor() - span_processor.otel_span_map[span_id] = sentry_span - - with mock.patch( - "sentry_sdk.integrations.opentelemetry.propagator.trace.get_current_span", - return_value=span, - ): - full_context = set_span_in_context(span, context) - SentryPropagator().inject(carrier, full_context, setter) - - setter.set.assert_any_call( - carrier, - "sentry-trace", - "1234567890abcdef1234567890abcdef-1234567890abcdef-1", - ) + carrier = {} + setter = DefaultSetter() - setter.set.assert_any_call( - carrier, - "baggage", - baggage.serialize(), + with sentry_sdk.start_span(name="foo") as span: + SentryPropagator().inject(carrier, setter=setter) + assert(carrier["sentry-trace"]) == f"{span.trace_id}-{span.span_id}-1" + assert(carrier["baggage"]) == SortedBaggage( + f"sentry-transaction=foo,sentry-release=release,sentry-environment=production,sentry-trace_id={span.trace_id},sentry-sample_rate=1.0,sentry-sampled=true" ) diff --git a/tests/integrations/opentelemetry/test_span_processor.py b/tests/integrations/opentelemetry/test_span_processor.py deleted file mode 100644 index f37f4a619d..0000000000 --- a/tests/integrations/opentelemetry/test_span_processor.py +++ /dev/null @@ -1,576 +0,0 @@ -import time -from datetime import datetime, timezone -from unittest import mock -from unittest.mock import MagicMock - -from opentelemetry.trace import SpanKind, SpanContext, Status, StatusCode - -import sentry_sdk -from sentry_sdk.integrations.opentelemetry.span_processor import ( - SentrySpanProcessor, - link_trace_context_to_error_event, -) -from sentry_sdk.integrations.opentelemetry.utils import is_sentry_span -from sentry_sdk.tracing import Span, Transaction -from sentry_sdk.tracing_utils import extract_sentrytrace_data - - -def test_is_sentry_span(): - otel_span = MagicMock() - - assert not is_sentry_span(otel_span) - - client = MagicMock() - client.dsn = "https://1234567890abcdef@o123456.ingest.sentry.io/123456" - sentry_sdk.get_global_scope().set_client(client) - - assert not is_sentry_span(otel_span) - - otel_span.attributes = { - "http.url": "https://example.com", - } - assert not is_sentry_span(otel_span) - - otel_span.attributes = { - "http.url": "https://o123456.ingest.sentry.io/api/123/envelope", - } - assert is_sentry_span(otel_span) - - -def test_get_otel_context(): - otel_span = MagicMock() - otel_span.attributes = {"foo": "bar"} - otel_span.resource = MagicMock() - otel_span.resource.attributes = {"baz": "qux"} - - span_processor = SentrySpanProcessor() - otel_context = 
span_processor._get_otel_context(otel_span) - - assert otel_context == { - "attributes": {"foo": "bar"}, - "resource": {"baz": "qux"}, - } - - -def test_get_trace_data_with_span_and_trace(): - otel_span = MagicMock() - span_context = SpanContext( - trace_id=int("1234567890abcdef1234567890abcdef", 16), - span_id=int("1234567890abcdef", 16), - is_remote=True, - ) - otel_span.get_span_context.return_value = span_context - otel_span.parent = None - - parent_context = {} - - span_processor = SentrySpanProcessor() - sentry_trace_data = span_processor._get_trace_data(otel_span, parent_context) - assert sentry_trace_data["trace_id"] == "1234567890abcdef1234567890abcdef" - assert sentry_trace_data["span_id"] == "1234567890abcdef" - assert sentry_trace_data["parent_span_id"] is None - assert sentry_trace_data["parent_sampled"] is None - assert sentry_trace_data["baggage"] is None - - -def test_get_trace_data_with_span_and_trace_and_parent(): - otel_span = MagicMock() - span_context = SpanContext( - trace_id=int("1234567890abcdef1234567890abcdef", 16), - span_id=int("1234567890abcdef", 16), - is_remote=True, - ) - otel_span.get_span_context.return_value = span_context - otel_span.parent = MagicMock() - otel_span.parent.span_id = int("abcdef1234567890", 16) - - parent_context = {} - - span_processor = SentrySpanProcessor() - sentry_trace_data = span_processor._get_trace_data(otel_span, parent_context) - assert sentry_trace_data["trace_id"] == "1234567890abcdef1234567890abcdef" - assert sentry_trace_data["span_id"] == "1234567890abcdef" - assert sentry_trace_data["parent_span_id"] == "abcdef1234567890" - assert sentry_trace_data["parent_sampled"] is None - assert sentry_trace_data["baggage"] is None - - -def test_get_trace_data_with_sentry_trace(): - otel_span = MagicMock() - span_context = SpanContext( - trace_id=int("1234567890abcdef1234567890abcdef", 16), - span_id=int("1234567890abcdef", 16), - is_remote=True, - ) - otel_span.get_span_context.return_value = span_context - otel_span.parent = MagicMock() - otel_span.parent.span_id = int("abcdef1234567890", 16) - - parent_context = {} - - with mock.patch( - "sentry_sdk.integrations.opentelemetry.span_processor.get_value", - side_effect=[ - extract_sentrytrace_data( - "1234567890abcdef1234567890abcdef-1234567890abcdef-1" - ), - None, - ], - ): - span_processor = SentrySpanProcessor() - sentry_trace_data = span_processor._get_trace_data(otel_span, parent_context) - assert sentry_trace_data["trace_id"] == "1234567890abcdef1234567890abcdef" - assert sentry_trace_data["span_id"] == "1234567890abcdef" - assert sentry_trace_data["parent_span_id"] == "abcdef1234567890" - assert sentry_trace_data["parent_sampled"] is True - assert sentry_trace_data["baggage"] is None - - with mock.patch( - "sentry_sdk.integrations.opentelemetry.span_processor.get_value", - side_effect=[ - extract_sentrytrace_data( - "1234567890abcdef1234567890abcdef-1234567890abcdef-0" - ), - None, - ], - ): - span_processor = SentrySpanProcessor() - sentry_trace_data = span_processor._get_trace_data(otel_span, parent_context) - assert sentry_trace_data["trace_id"] == "1234567890abcdef1234567890abcdef" - assert sentry_trace_data["span_id"] == "1234567890abcdef" - assert sentry_trace_data["parent_span_id"] == "abcdef1234567890" - assert sentry_trace_data["parent_sampled"] is False - assert sentry_trace_data["baggage"] is None - - -def test_get_trace_data_with_sentry_trace_and_baggage(): - otel_span = MagicMock() - span_context = SpanContext( - trace_id=int("1234567890abcdef1234567890abcdef", 
16), - span_id=int("1234567890abcdef", 16), - is_remote=True, - ) - otel_span.get_span_context.return_value = span_context - otel_span.parent = MagicMock() - otel_span.parent.span_id = int("abcdef1234567890", 16) - - parent_context = {} - - baggage = ( - "sentry-trace_id=771a43a4192642f0b136d5159a501700," - "sentry-public_key=49d0f7386ad645858ae85020e393bef3," - "sentry-sample_rate=0.01337,sentry-user_id=Am%C3%A9lie" - ) - - with mock.patch( - "sentry_sdk.integrations.opentelemetry.span_processor.get_value", - side_effect=[ - extract_sentrytrace_data( - "1234567890abcdef1234567890abcdef-1234567890abcdef-1" - ), - baggage, - ], - ): - span_processor = SentrySpanProcessor() - sentry_trace_data = span_processor._get_trace_data(otel_span, parent_context) - assert sentry_trace_data["trace_id"] == "1234567890abcdef1234567890abcdef" - assert sentry_trace_data["span_id"] == "1234567890abcdef" - assert sentry_trace_data["parent_span_id"] == "abcdef1234567890" - assert sentry_trace_data["parent_sampled"] - assert sentry_trace_data["baggage"] == baggage - - -def test_update_span_with_otel_data_http_method(): - sentry_span = Span() - - otel_span = MagicMock() - otel_span.name = "Test OTel Span" - otel_span.kind = SpanKind.CLIENT - otel_span.attributes = { - "http.method": "GET", - "http.status_code": 429, - "http.status_text": "xxx", - "http.user_agent": "curl/7.64.1", - "net.peer.name": "example.com", - "http.target": "/", - } - - span_processor = SentrySpanProcessor() - span_processor._update_span_with_otel_data(sentry_span, otel_span) - - assert sentry_span.op == "http.client" - assert sentry_span.description == "GET /" - assert sentry_span.status == "resource_exhausted" - - assert sentry_span._data["http.method"] == "GET" - assert sentry_span._data["http.response.status_code"] == 429 - assert sentry_span._data["http.status_text"] == "xxx" - assert sentry_span._data["http.user_agent"] == "curl/7.64.1" - assert sentry_span._data["net.peer.name"] == "example.com" - assert sentry_span._data["http.target"] == "/" - - -def test_update_span_with_otel_data_http_method2(): - sentry_span = Span() - - otel_span = MagicMock() - otel_span.name = "Test OTel Span" - otel_span.kind = SpanKind.SERVER - otel_span.attributes = { - "http.method": "GET", - "http.status_code": 429, - "http.status_text": "xxx", - "http.user_agent": "curl/7.64.1", - "http.url": "https://example.com/status/403?password=123&username=test@example.com&author=User123&auth=1234567890abcdef", - } - - span_processor = SentrySpanProcessor() - span_processor._update_span_with_otel_data(sentry_span, otel_span) - - assert sentry_span.op == "http.server" - assert sentry_span.description == "GET https://example.com/status/403" - assert sentry_span.status == "resource_exhausted" - - assert sentry_span._data["http.method"] == "GET" - assert sentry_span._data["http.response.status_code"] == 429 - assert sentry_span._data["http.status_text"] == "xxx" - assert sentry_span._data["http.user_agent"] == "curl/7.64.1" - assert ( - sentry_span._data["http.url"] - == "https://example.com/status/403?password=123&username=test@example.com&author=User123&auth=1234567890abcdef" - ) - - -def test_update_span_with_otel_data_db_query(): - sentry_span = Span() - - otel_span = MagicMock() - otel_span.name = "Test OTel Span" - otel_span.attributes = { - "db.system": "postgresql", - "db.statement": "SELECT * FROM table where pwd = '123456'", - } - - span_processor = SentrySpanProcessor() - span_processor._update_span_with_otel_data(sentry_span, otel_span) - - assert 
sentry_span.op == "db" - assert sentry_span.description == "SELECT * FROM table where pwd = '123456'" - - assert sentry_span._data["db.system"] == "postgresql" - assert ( - sentry_span._data["db.statement"] == "SELECT * FROM table where pwd = '123456'" - ) - - -def test_on_start_transaction(): - otel_span = MagicMock() - otel_span.name = "Sample OTel Span" - otel_span.start_time = time.time_ns() - span_context = SpanContext( - trace_id=int("1234567890abcdef1234567890abcdef", 16), - span_id=int("1234567890abcdef", 16), - is_remote=True, - ) - otel_span.get_span_context.return_value = span_context - otel_span.parent = MagicMock() - otel_span.parent.span_id = int("abcdef1234567890", 16) - - parent_context = {} - - fake_start_transaction = MagicMock() - - fake_client = MagicMock() - fake_client.dsn = "https://1234567890abcdef@o123456.ingest.sentry.io/123456" - sentry_sdk.get_global_scope().set_client(fake_client) - - with mock.patch( - "sentry_sdk.integrations.opentelemetry.span_processor.start_transaction", - fake_start_transaction, - ): - span_processor = SentrySpanProcessor() - span_processor.on_start(otel_span, parent_context) - - fake_start_transaction.assert_called_once_with( - name="Sample OTel Span", - span_id="1234567890abcdef", - parent_span_id="abcdef1234567890", - trace_id="1234567890abcdef1234567890abcdef", - baggage=None, - start_timestamp=datetime.fromtimestamp( - otel_span.start_time / 1e9, timezone.utc - ), - origin="auto.otel", - ) - - assert len(span_processor.otel_span_map.keys()) == 1 - assert list(span_processor.otel_span_map.keys())[0] == "1234567890abcdef" - - -def test_on_start_child(): - otel_span = MagicMock() - otel_span.name = "Sample OTel Span" - otel_span.start_time = time.time_ns() - span_context = SpanContext( - trace_id=int("1234567890abcdef1234567890abcdef", 16), - span_id=int("1234567890abcdef", 16), - is_remote=True, - ) - otel_span.get_span_context.return_value = span_context - otel_span.parent = MagicMock() - otel_span.parent.span_id = int("abcdef1234567890", 16) - - parent_context = {} - - fake_client = MagicMock() - fake_client.dsn = "https://1234567890abcdef@o123456.ingest.sentry.io/123456" - sentry_sdk.get_global_scope().set_client(fake_client) - - fake_span = MagicMock() - - span_processor = SentrySpanProcessor() - span_processor.otel_span_map["abcdef1234567890"] = fake_span - span_processor.on_start(otel_span, parent_context) - - fake_span.start_child.assert_called_once_with( - span_id="1234567890abcdef", - name="Sample OTel Span", - start_timestamp=datetime.fromtimestamp( - otel_span.start_time / 1e9, timezone.utc - ), - origin="auto.otel", - ) - - assert len(span_processor.otel_span_map.keys()) == 2 - assert "abcdef1234567890" in span_processor.otel_span_map.keys() - assert "1234567890abcdef" in span_processor.otel_span_map.keys() - - -def test_on_end_no_sentry_span(): - """ - If on_end is called on a span that is not in the otel_span_map, it should be a no-op. 
- """ - otel_span = MagicMock() - otel_span.name = "Sample OTel Span" - otel_span.end_time = time.time_ns() - span_context = SpanContext( - trace_id=int("1234567890abcdef1234567890abcdef", 16), - span_id=int("1234567890abcdef", 16), - is_remote=True, - ) - otel_span.get_span_context.return_value = span_context - - span_processor = SentrySpanProcessor() - span_processor.otel_span_map = {} - span_processor._get_otel_context = MagicMock() - span_processor._update_span_with_otel_data = MagicMock() - - span_processor.on_end(otel_span) - - span_processor._get_otel_context.assert_not_called() - span_processor._update_span_with_otel_data.assert_not_called() - - -def test_on_end_sentry_transaction(): - """ - Test on_end for a sentry Transaction. - """ - otel_span = MagicMock() - otel_span.name = "Sample OTel Span" - otel_span.end_time = time.time_ns() - otel_span.status = Status(StatusCode.OK) - span_context = SpanContext( - trace_id=int("1234567890abcdef1234567890abcdef", 16), - span_id=int("1234567890abcdef", 16), - is_remote=True, - ) - otel_span.get_span_context.return_value = span_context - - fake_client = MagicMock() - sentry_sdk.get_global_scope().set_client(fake_client) - - fake_sentry_span = MagicMock(spec=Transaction) - fake_sentry_span.set_context = MagicMock() - fake_sentry_span.finish = MagicMock() - - span_processor = SentrySpanProcessor() - span_processor._get_otel_context = MagicMock() - span_processor._update_span_with_otel_data = MagicMock() - span_processor._update_transaction_with_otel_data = MagicMock() - span_processor.otel_span_map["1234567890abcdef"] = fake_sentry_span - - span_processor.on_end(otel_span) - - fake_sentry_span.set_context.assert_called_once() - span_processor._update_span_with_otel_data.assert_not_called() - span_processor._update_transaction_with_otel_data.assert_called_once() - fake_sentry_span.finish.assert_called_once() - - -def test_on_end_sentry_span(): - """ - Test on_end for a sentry Span. - """ - otel_span = MagicMock() - otel_span.name = "Sample OTel Span" - otel_span.end_time = time.time_ns() - otel_span.status = Status(StatusCode.OK) - span_context = SpanContext( - trace_id=int("1234567890abcdef1234567890abcdef", 16), - span_id=int("1234567890abcdef", 16), - is_remote=True, - ) - otel_span.get_span_context.return_value = span_context - - fake_client = MagicMock() - sentry_sdk.get_global_scope().set_client(fake_client) - - fake_sentry_span = MagicMock(spec=Span) - fake_sentry_span.set_context = MagicMock() - fake_sentry_span.finish = MagicMock() - - span_processor = SentrySpanProcessor() - span_processor._get_otel_context = MagicMock() - span_processor._update_span_with_otel_data = MagicMock() - span_processor.otel_span_map["1234567890abcdef"] = fake_sentry_span - - span_processor.on_end(otel_span) - - fake_sentry_span.set_context.assert_not_called() - span_processor._update_span_with_otel_data.assert_called_once_with( - fake_sentry_span, otel_span - ) - fake_sentry_span.finish.assert_called_once() - - -def test_link_trace_context_to_error_event(): - """ - Test that the trace context is added to the error event. 
- """ - fake_client = MagicMock() - sentry_sdk.get_global_scope().set_client(fake_client) - - span_id = "1234567890abcdef" - trace_id = "1234567890abcdef1234567890abcdef" - - fake_trace_context = { - "bla": "blub", - "foo": "bar", - "baz": 123, - } - - sentry_span = MagicMock() - sentry_span.get_trace_context = MagicMock(return_value=fake_trace_context) - - otel_span_map = { - span_id: sentry_span, - } - - span_context = SpanContext( - trace_id=int(trace_id, 16), - span_id=int(span_id, 16), - is_remote=True, - ) - otel_span = MagicMock() - otel_span.get_span_context = MagicMock(return_value=span_context) - - fake_event = {"event_id": "1234567890abcdef1234567890abcdef"} - - with mock.patch( - "sentry_sdk.integrations.opentelemetry.span_processor.get_current_span", - return_value=otel_span, - ): - event = link_trace_context_to_error_event(fake_event, otel_span_map) - - assert event - assert event == fake_event # the event is changed in place inside the function - assert "contexts" in event - assert "trace" in event["contexts"] - assert event["contexts"]["trace"] == fake_trace_context - - -def test_pruning_old_spans_on_start(): - otel_span = MagicMock() - otel_span.name = "Sample OTel Span" - otel_span.start_time = time.time_ns() - span_context = SpanContext( - trace_id=int("1234567890abcdef1234567890abcdef", 16), - span_id=int("1234567890abcdef", 16), - is_remote=True, - ) - otel_span.get_span_context.return_value = span_context - otel_span.parent = MagicMock() - otel_span.parent.span_id = int("abcdef1234567890", 16) - - parent_context = {} - fake_client = MagicMock() - fake_client.options = {"debug": False} - fake_client.dsn = "https://1234567890abcdef@o123456.ingest.sentry.io/123456" - sentry_sdk.get_global_scope().set_client(fake_client) - - span_processor = SentrySpanProcessor() - - span_processor.otel_span_map = { - "111111111abcdef": MagicMock(), # should stay - "2222222222abcdef": MagicMock(), # should go - "3333333333abcdef": MagicMock(), # should go - } - current_time_minutes = int(time.time() / 60) - span_processor.open_spans = { - current_time_minutes - 3: {"111111111abcdef"}, # should stay - current_time_minutes - - 11: {"2222222222abcdef", "3333333333abcdef"}, # should go - } - - span_processor.on_start(otel_span, parent_context) - assert sorted(list(span_processor.otel_span_map.keys())) == [ - "111111111abcdef", - "1234567890abcdef", - ] - assert sorted(list(span_processor.open_spans.values())) == [ - {"111111111abcdef"}, - {"1234567890abcdef"}, - ] - - -def test_pruning_old_spans_on_end(): - otel_span = MagicMock() - otel_span.name = "Sample OTel Span" - otel_span.start_time = time.time_ns() - span_context = SpanContext( - trace_id=int("1234567890abcdef1234567890abcdef", 16), - span_id=int("1234567890abcdef", 16), - is_remote=True, - ) - otel_span.get_span_context.return_value = span_context - otel_span.parent = MagicMock() - otel_span.parent.span_id = int("abcdef1234567890", 16) - - fake_client = MagicMock() - sentry_sdk.get_global_scope().set_client(fake_client) - - fake_sentry_span = MagicMock(spec=Span) - fake_sentry_span.set_context = MagicMock() - fake_sentry_span.finish = MagicMock() - - span_processor = SentrySpanProcessor() - span_processor._get_otel_context = MagicMock() - span_processor._update_span_with_otel_data = MagicMock() - - span_processor.otel_span_map = { - "111111111abcdef": MagicMock(), # should stay - "2222222222abcdef": MagicMock(), # should go - "3333333333abcdef": MagicMock(), # should go - "1234567890abcdef": fake_sentry_span, # should go (because it 
is closed) - } - current_time_minutes = int(time.time() / 60) - span_processor.open_spans = { - current_time_minutes: {"1234567890abcdef"}, # should go (because it is closed) - current_time_minutes - 3: {"111111111abcdef"}, # should stay - current_time_minutes - - 11: {"2222222222abcdef", "3333333333abcdef"}, # should go - } - - span_processor.on_end(otel_span) - assert sorted(list(span_processor.otel_span_map.keys())) == ["111111111abcdef"] - assert sorted(list(span_processor.open_spans.values())) == [{"111111111abcdef"}] From f0a6f5e9d5ef62d4f690595c93a6b18e63b9efd5 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Mon, 2 Dec 2024 16:32:56 +0100 Subject: [PATCH 134/244] Reverted the streaming response code in potel --- sentry_sdk/integrations/wsgi.py | 96 +++++++++++---------------------- 1 file changed, 32 insertions(+), 64 deletions(-) diff --git a/sentry_sdk/integrations/wsgi.py b/sentry_sdk/integrations/wsgi.py index 816c9b0a6a..70324a3641 100644 --- a/sentry_sdk/integrations/wsgi.py +++ b/sentry_sdk/integrations/wsgi.py @@ -1,4 +1,5 @@ import sys +from contextlib import nullcontext from functools import partial import sentry_sdk @@ -11,7 +12,6 @@ ) from sentry_sdk.sessions import track_session from sentry_sdk.tracing import Transaction, TRANSACTION_SOURCE_ROUTE -from sentry_sdk.tracing_utils import finish_running_transaction from sentry_sdk.utils import ( ContextVar, capture_internal_exceptions, @@ -44,9 +44,6 @@ def __call__(self, status, response_headers, exc_info=None): # type: ignore pass -MAX_TRANSACTION_DURATION_SECONDS = 5 * 60 - - _wsgi_middleware_applied = ContextVar("sentry_wsgi_middleware_applied") DEFAULT_TRANSACTION_NAME = "generic WSGI request" @@ -113,7 +110,6 @@ def __call__(self, environ, start_response): scope.set_transaction_name( DEFAULT_TRANSACTION_NAME, source=TRANSACTION_SOURCE_ROUTE ) - current_scope = sentry_sdk.get_current_scope() with track_session(scope, session_mode="request"): with capture_internal_exceptions(): @@ -124,14 +120,11 @@ def __call__(self, environ, start_response): environ, self.use_x_forwarded_for ) ) - method = environ.get("REQUEST_METHOD", "").upper() should_trace = method in self.http_methods_to_capture - transaction = None - with sentry_sdk.continue_trace(environ): - if should_trace: - transaction = sentry_sdk.start_span( + with ( + sentry_sdk.start_span( op=OP.HTTP_SERVER, name=DEFAULT_TRANSACTION_NAME, source=TRANSACTION_SOURCE_ROUTE, @@ -140,9 +133,9 @@ def __call__(self, environ, start_response): environ, self.use_x_forwarded_for ), ) - transaction.__enter__() - current_scope = transaction.scope - + if should_trace + else nullcontext() + ) as transaction: try: response = self.app( environ, @@ -153,20 +146,12 @@ def __call__(self, environ, start_response): ), ) except BaseException: - exc_info = sys.exc_info() - _capture_exception(exc_info) - finish_running_transaction(transaction, exc_info) - reraise(*exc_info) + reraise(*_capture_exception()) finally: _wsgi_middleware_applied.set(False) - return _ScopedResponse( - response=response, - current_scope=current_scope, - isolation_scope=scope, - transaction=transaction, - ) + return _ScopedResponse(scope, response) def _sentry_start_response( # type: ignore @@ -228,13 +213,13 @@ def get_client_ip(environ): return environ.get("REMOTE_ADDR") -def _capture_exception(exc_info=None): - # type: (Optional[ExcInfo]) -> ExcInfo +def _capture_exception(): + # type: () -> ExcInfo """ Captures the current exception and sends it to Sentry. Returns the ExcInfo tuple to it can be reraised afterwards. 
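# Illustrative, standalone sketch of the conditional-tracing pattern used in the
# reverted WSGI code above: when the request method should not be traced,
# contextlib.nullcontext() replaces the span context manager, so the request body
# still runs inside a with-block and the bound name is simply None. FakeSpan is a
# hypothetical stand-in for the real span object.
from contextlib import nullcontext

class FakeSpan:
    def __enter__(self):
        return self
    def __exit__(self, *exc):
        return False

def handle(should_trace):
    with (FakeSpan() if should_trace else nullcontext()) as span:
        return span

assert isinstance(handle(True), FakeSpan)
assert handle(False) is None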
""" - exc_info = exc_info or sys.exc_info() + exc_info = sys.exc_info() e = exc_info[1] # SystemExit(0) is the only uncaught exception that is expected behavior @@ -252,7 +237,7 @@ def _capture_exception(exc_info=None): class _ScopedResponse: """ - Use separate scopes for each response chunk. + Users a separate scope for each response chunk. This will make WSGI apps more tolerant against: - WSGI servers streaming responses from a different thread/from @@ -261,54 +246,37 @@ class _ScopedResponse: - WSGI servers streaming responses interleaved from the same thread """ - __slots__ = ("_response", "_current_scope", "_isolation_scope", "_transaction") + __slots__ = ("_response", "_scope") - def __init__( - self, - response, # type: Iterator[bytes] - current_scope, # type: sentry_sdk.scope.Scope - isolation_scope, # type: sentry_sdk.scope.Scope - transaction=None, # type: Optional[Transaction] - ): - # type: (...) -> None + def __init__(self, scope, response): + # type: (sentry_sdk.scope.Scope, Iterator[bytes]) -> None + self._scope = scope self._response = response - self._current_scope = current_scope - self._isolation_scope = isolation_scope - self._transaction = transaction def __iter__(self): # type: () -> Iterator[bytes] iterator = iter(self._response) - try: - while True: - with sentry_sdk.use_isolation_scope(self._isolation_scope): - with sentry_sdk.use_scope(self._current_scope): - try: - chunk = next(iterator) - except StopIteration: - break - except BaseException: - reraise(*_capture_exception()) - - yield chunk + while True: + with sentry_sdk.use_isolation_scope(self._scope): + try: + chunk = next(iterator) + except StopIteration: + break + except BaseException: + reraise(*_capture_exception()) - finally: - with sentry_sdk.use_isolation_scope(self._isolation_scope): - with sentry_sdk.use_scope(self._current_scope): - finish_running_transaction(transaction=self._transaction) + yield chunk def close(self): # type: () -> None - with sentry_sdk.use_isolation_scope(self._isolation_scope): - with sentry_sdk.use_scope(self._current_scope): - try: - finish_running_transaction(transaction=self._transaction) - self._response.close() # type: ignore - except AttributeError: - pass - except BaseException: - reraise(*_capture_exception()) + with sentry_sdk.use_isolation_scope(self._scope): + try: + self._response.close() # type: ignore + except AttributeError: + pass + except BaseException: + reraise(*_capture_exception()) def _make_wsgi_event_processor(environ, use_x_forwarded_for): From a962fab955ac494333c4f9f07b8985446f392dd3 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Mon, 2 Dec 2024 16:39:51 +0100 Subject: [PATCH 135/244] fixed import --- sentry_sdk/integrations/wsgi.py | 1 - 1 file changed, 1 deletion(-) diff --git a/sentry_sdk/integrations/wsgi.py b/sentry_sdk/integrations/wsgi.py index 4f7b99c35a..726a310482 100644 --- a/sentry_sdk/integrations/wsgi.py +++ b/sentry_sdk/integrations/wsgi.py @@ -9,7 +9,6 @@ from sentry_sdk.integrations._wsgi_common import ( DEFAULT_HTTP_METHODS_TO_CAPTURE, _filter_headers, - nullcontext, ) from sentry_sdk.sessions import track_session from sentry_sdk.scope import use_isolation_scope From 6e1b1cbfcc60c5c8ea06cec9456744fbbd25c0b4 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Tue, 3 Dec 2024 14:43:24 +0100 Subject: [PATCH 136/244] Fix initial scope handling (#3837) --- .../integrations/opentelemetry/integration.py | 2 ++ .../integrations/opentelemetry/propagator.py | 2 +- sentry_sdk/integrations/opentelemetry/scope.py | 15 +++++++++++---- 
tests/conftest.py | 2 +- tests/integrations/stdlib/test_subprocess.py | 10 +++++----- 5 files changed, 20 insertions(+), 11 deletions(-) diff --git a/sentry_sdk/integrations/opentelemetry/integration.py b/sentry_sdk/integrations/opentelemetry/integration.py index 231bb4f32b..013575dfa7 100644 --- a/sentry_sdk/integrations/opentelemetry/integration.py +++ b/sentry_sdk/integrations/opentelemetry/integration.py @@ -5,6 +5,7 @@ """ from sentry_sdk.integrations import DidNotEnable, Integration +from sentry_sdk.integrations.opentelemetry.scope import setup_initial_scopes from sentry_sdk.integrations.opentelemetry.propagator import SentryPropagator from sentry_sdk.integrations.opentelemetry.span_processor import ( SentrySpanProcessor, @@ -74,6 +75,7 @@ def _setup_scope_context_management(): import opentelemetry.context opentelemetry.context._RUNTIME_CONTEXT = SentryContextVarsRuntimeContext() + setup_initial_scopes() def _setup_sentry_tracing(): diff --git a/sentry_sdk/integrations/opentelemetry/propagator.py b/sentry_sdk/integrations/opentelemetry/propagator.py index 0c6eda27a2..37d6362f82 100644 --- a/sentry_sdk/integrations/opentelemetry/propagator.py +++ b/sentry_sdk/integrations/opentelemetry/propagator.py @@ -99,7 +99,7 @@ def inject(self, carrier, context=None, setter=default_setter): # TODO-neel-potel check trace_propagation_targets # TODO-neel-potel test propagator works with twp - for (key, value) in current_scope.iter_trace_propagation_headers(): + for key, value in current_scope.iter_trace_propagation_headers(): setter.set(carrier, key, value) @property diff --git a/sentry_sdk/integrations/opentelemetry/scope.py b/sentry_sdk/integrations/opentelemetry/scope.py index f6df844109..2e12cb53d4 100644 --- a/sentry_sdk/integrations/opentelemetry/scope.py +++ b/sentry_sdk/integrations/opentelemetry/scope.py @@ -1,7 +1,14 @@ from typing import cast from contextlib import contextmanager -from opentelemetry.context import get_value, set_value, attach, detach, get_current +from opentelemetry.context import ( + Context, + get_value, + set_value, + attach, + detach, + get_current, +) from opentelemetry.trace import ( SpanContext, NonRecordingSpan, @@ -136,13 +143,13 @@ def start_span(self, **kwargs): _INITIAL_ISOLATION_SCOPE = None -def _setup_initial_scopes(): +def setup_initial_scopes(): global _INITIAL_CURRENT_SCOPE, _INITIAL_ISOLATION_SCOPE _INITIAL_CURRENT_SCOPE = PotelScope(ty=ScopeType.CURRENT) _INITIAL_ISOLATION_SCOPE = PotelScope(ty=ScopeType.ISOLATION) - -_setup_initial_scopes() + scopes = (_INITIAL_CURRENT_SCOPE, _INITIAL_ISOLATION_SCOPE) + attach(set_value(SENTRY_SCOPES_KEY, scopes)) @contextmanager diff --git a/tests/conftest.py b/tests/conftest.py index cdac88aa2c..18b3ec3576 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -74,7 +74,7 @@ def clean_scopes(): scope._isolation_scope.set(None) scope._current_scope.set(None) - potel_scope._setup_initial_scopes() + potel_scope.setup_initial_scopes() @pytest.fixture(autouse=True) diff --git a/tests/integrations/stdlib/test_subprocess.py b/tests/integrations/stdlib/test_subprocess.py index 62d5a2aeba..8e3166e512 100644 --- a/tests/integrations/stdlib/test_subprocess.py +++ b/tests/integrations/stdlib/test_subprocess.py @@ -7,7 +7,7 @@ import pytest -from sentry_sdk import capture_exception, capture_message, start_transaction +from sentry_sdk import capture_exception, capture_message, start_span from sentry_sdk.integrations.stdlib import StdlibIntegration from tests.conftest import ApproxDict @@ -59,7 +59,7 @@ def 
test_subprocess_basic( sentry_init(integrations=[StdlibIntegration()], traces_sample_rate=1.0) events = capture_events() - with start_transaction(name="foo") as transaction: + with start_span(name="foo") as span: args = [ sys.executable, "-c", @@ -110,7 +110,7 @@ def test_subprocess_basic( assert os.environ == old_environ - assert transaction.trace_id in str(output) + assert span.trace_id in str(output) capture_message("hi") @@ -178,7 +178,7 @@ def test_subprocess_basic( def test_subprocess_empty_env(sentry_init, monkeypatch): monkeypatch.setenv("TEST_MARKER", "should_not_be_seen") sentry_init(integrations=[StdlibIntegration()], traces_sample_rate=1.0) - with start_transaction(name="foo"): + with start_span(name="foo"): args = [ sys.executable, "-c", @@ -201,7 +201,7 @@ def test_subprocess_span_origin(sentry_init, capture_events): sentry_init(integrations=[StdlibIntegration()], traces_sample_rate=1.0) events = capture_events() - with start_transaction(name="foo"): + with start_span(name="foo"): args = [ sys.executable, "-c", From c54ebed62e114ef0fc19e5e872386b7104dbb1d1 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Tue, 3 Dec 2024 16:44:08 +0100 Subject: [PATCH 137/244] fixed comment --- tests/integrations/clickhouse_driver/test_clickhouse_driver.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/integrations/clickhouse_driver/test_clickhouse_driver.py b/tests/integrations/clickhouse_driver/test_clickhouse_driver.py index 381cbaafd1..2c3d3c41a4 100644 --- a/tests/integrations/clickhouse_driver/test_clickhouse_driver.py +++ b/tests/integrations/clickhouse_driver/test_clickhouse_driver.py @@ -1,7 +1,7 @@ """ Tests need a local clickhouse instance running, this can best be done using ```sh -docker run -d -p 18123:8123 -p9000:9000 --name clickhouse-test --ulimit nofile=262144:262144 --rm clickhouse/clickhouse-server +docker run -d -p 18123:8123 -p9000:9000 --name clickhouse-test --ulimit nofile=262144:262144 --rm clickhouse ``` """ From de7e58b9f739729c909ed971d535c211e71a42fd Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Tue, 3 Dec 2024 16:45:58 +0100 Subject: [PATCH 138/244] Profiler sampling context (#3840) --- .../integrations/opentelemetry/sampler.py | 30 ++++++++++++------- .../opentelemetry/span_processor.py | 7 +++-- sentry_sdk/tracing_utils.py | 4 --- tests/tracing/test_sampling.py | 21 +++++++++++++ 4 files changed, 46 insertions(+), 16 deletions(-) diff --git a/sentry_sdk/integrations/opentelemetry/sampler.py b/sentry_sdk/integrations/opentelemetry/sampler.py index 0631c0b19e..8d886add09 100644 --- a/sentry_sdk/integrations/opentelemetry/sampler.py +++ b/sentry_sdk/integrations/opentelemetry/sampler.py @@ -17,7 +17,7 @@ from typing import TYPE_CHECKING if TYPE_CHECKING: - from typing import Optional, Sequence, Union + from typing import Any, Optional, Sequence, Union from opentelemetry.context import Context from opentelemetry.trace import Link, SpanKind from opentelemetry.trace.span import SpanContext @@ -152,15 +152,9 @@ def should_sample( has_traces_sampler = callable(client.options.get("traces_sampler")) if is_root_span and has_traces_sampler: - sampling_context = { - "transaction_context": { - "name": name, - "op": attributes.get(SentrySpanAttribute.OP), - "source": attributes.get(SentrySpanAttribute.SOURCE), - }, - "parent_sampled": get_parent_sampled(parent_span_context, trace_id), - } - sampling_context.update(attributes) + sampling_context = create_sampling_context( + name, attributes, parent_span_context, trace_id + ) sample_rate = 
client.options["traces_sampler"](sampling_context) else: # Check if there is a parent with a sampling decision @@ -193,3 +187,19 @@ def should_sample( def get_description(self) -> str: return self.__class__.__name__ + + +def create_sampling_context(name, attributes, parent_span_context, trace_id): + # type: (str, Attributes, SpanContext, str) -> dict[str, Any] + sampling_context = { + "transaction_context": { + "name": name, + "op": attributes.get(SentrySpanAttribute.OP), + "source": attributes.get(SentrySpanAttribute.SOURCE), + }, + "parent_sampled": get_parent_sampled(parent_span_context, trace_id), + } + + sampling_context.update(attributes) + + return sampling_context diff --git a/sentry_sdk/integrations/opentelemetry/span_processor.py b/sentry_sdk/integrations/opentelemetry/span_processor.py index 37f27d8cba..0b4c3387df 100644 --- a/sentry_sdk/integrations/opentelemetry/span_processor.py +++ b/sentry_sdk/integrations/opentelemetry/span_processor.py @@ -20,6 +20,7 @@ get_profiler_id, ) from sentry_sdk.profiler.transaction_profiler import Profile +from sentry_sdk.integrations.opentelemetry.sampler import create_sampling_context from sentry_sdk.integrations.opentelemetry.utils import ( is_sentry_span, convert_from_otel_timestamp, @@ -126,8 +127,10 @@ def _start_profile(self, span): # unix timestamp that is on span.start_time # setting it to 0 means the profiler will internally measure time on start profile = Profile(sampled, 0) - # TODO-neel-potel sampling context?? - profile._set_initial_sampling_decision(sampling_context={}) + sampling_context = create_sampling_context( + span.name, span.attributes, span.parent, span.context.trace_id + ) + profile._set_initial_sampling_decision(sampling_context) profile.__enter__() set_sentry_meta(span, "profile", profile) diff --git a/sentry_sdk/tracing_utils.py b/sentry_sdk/tracing_utils.py index 35c96fd7c9..4e2f46c81a 100644 --- a/sentry_sdk/tracing_utils.py +++ b/sentry_sdk/tracing_utils.py @@ -33,7 +33,6 @@ from typing import Generator from typing import Optional from typing import Union - from types import FrameType @@ -731,6 +730,3 @@ def get_current_span(scope=None): LOW_QUALITY_TRANSACTION_SOURCES, SENTRY_TRACE_HEADER_NAME, ) - -if TYPE_CHECKING: - from sentry_sdk.tracing import Span diff --git a/tests/tracing/test_sampling.py b/tests/tracing/test_sampling.py index db5a545b5c..49ddc55c7f 100644 --- a/tests/tracing/test_sampling.py +++ b/tests/tracing/test_sampling.py @@ -307,3 +307,24 @@ def test_records_lost_event_only_if_traces_sampler_enabled( # Use Counter because order of calls does not matter assert Counter(record_lost_event_calls) == Counter(expected_record_lost_event_calls) + + +@pytest.mark.parametrize("parent_sampling_decision", [True, False]) +def test_profiles_sampler_gets_sampling_context(sentry_init, parent_sampling_decision): + def dummy_profiles_sampler(sampling_context): + assert sampling_context["transaction_context"] == { + "name": "dogpark", + "op": "op", + "source": "custom", + } + assert sampling_context["parent_sampled"] == parent_sampling_decision + return 1.0 + + sentry_init(traces_sample_rate=1.0, profiles_sampler=dummy_profiles_sampler) + + sentry_trace = "12312012123120121231201212312012-1121201211212012-{}".format( + int(parent_sampling_decision) + ) + with sentry_sdk.continue_trace({"sentry-trace": sentry_trace}): + with sentry_sdk.start_span(name="dogpark", op="op"): + pass From 2e239319669055525a1121a64b841d32d67ab3cf Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Tue, 3 Dec 2024 16:52:06 +0100 Subject: 
[PATCH 139/244] Fixed pymongo tests (#3839) --- tests/integrations/pymongo/test_pymongo.py | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/tests/integrations/pymongo/test_pymongo.py b/tests/integrations/pymongo/test_pymongo.py index 7ebfc1159c..75e946cda8 100644 --- a/tests/integrations/pymongo/test_pymongo.py +++ b/tests/integrations/pymongo/test_pymongo.py @@ -1,6 +1,6 @@ import re -from sentry_sdk import capture_message, start_transaction +import sentry_sdk from sentry_sdk.consts import SPANDATA from sentry_sdk.integrations.pymongo import PyMongoIntegration, _strip_pii @@ -37,7 +37,7 @@ def test_transactions(sentry_init, capture_events, mongo_server, with_pii): connection = MongoClient(mongo_server.uri) - with start_transaction(): + with sentry_sdk.start_span(): list( connection["test_db"]["test_collection"].find({"foobar": 1}) ) # force query execution @@ -119,13 +119,9 @@ def test_breadcrumbs( list( connection["test_db"]["test_collection"].find({"foobar": 1}) ) # force query execution - capture_message("hi") - - if traces_sample_rate: - event = events[1] - else: - event = events[0] + sentry_sdk.capture_message("hi") + (event,) = events (crumb,) = event["breadcrumbs"]["values"] assert crumb["category"] == "query" @@ -450,7 +446,7 @@ def test_span_origin(sentry_init, capture_events, mongo_server): connection = MongoClient(mongo_server.uri) - with start_transaction(): + with sentry_sdk.start_span(): list( connection["test_db"]["test_collection"].find({"foobar": 1}) ) # force query execution From 8b70a6643b7205c160d821eda011f10d76a29efd Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Tue, 3 Dec 2024 16:52:31 +0100 Subject: [PATCH 140/244] Fix potel tests in potel (#3841) * Make sure to add data before span is closed. 
some cleanup * Fixed some tests --- sentry_sdk/integrations/sqlalchemy.py | 8 +-- sentry_sdk/tracing_utils.py | 1 + .../sqlalchemy/test_sqlalchemy.py | 50 ++++++++----------- 3 files changed, 27 insertions(+), 32 deletions(-) diff --git a/sentry_sdk/integrations/sqlalchemy.py b/sentry_sdk/integrations/sqlalchemy.py index 0a54108e75..3a8731bea9 100644 --- a/sentry_sdk/integrations/sqlalchemy.py +++ b/sentry_sdk/integrations/sqlalchemy.py @@ -76,15 +76,15 @@ def _after_cursor_execute(conn, cursor, statement, parameters, context, *args): context, "_sentry_sql_span_manager", None ) # type: Optional[ContextManager[Any]] - if ctx_mgr is not None: - context._sentry_sql_span_manager = None - ctx_mgr.__exit__(None, None, None) - span = getattr(context, "_sentry_sql_span", None) # type: Optional[Span] if span is not None: with capture_internal_exceptions(): add_query_source(span) + if ctx_mgr is not None: + context._sentry_sql_span_manager = None + ctx_mgr.__exit__(None, None, None) + def _handle_error(context, *args): # type: (Any, *Any) -> None diff --git a/sentry_sdk/tracing_utils.py b/sentry_sdk/tracing_utils.py index 4e2f46c81a..e217994839 100644 --- a/sentry_sdk/tracing_utils.py +++ b/sentry_sdk/tracing_utils.py @@ -147,6 +147,7 @@ def record_sql_queries( op=OP.DB, name=query, origin=span_origin, + only_if_parent=True, ) as span: for k, v in data.items(): span.set_data(k, v) diff --git a/tests/integrations/sqlalchemy/test_sqlalchemy.py b/tests/integrations/sqlalchemy/test_sqlalchemy.py index 84657d8c8f..48390b352e 100644 --- a/tests/integrations/sqlalchemy/test_sqlalchemy.py +++ b/tests/integrations/sqlalchemy/test_sqlalchemy.py @@ -1,3 +1,4 @@ +import contextlib import os from datetime import datetime from unittest import mock @@ -11,7 +12,6 @@ from sqlalchemy import text import sentry_sdk -from sentry_sdk import capture_message, start_transaction from sentry_sdk.consts import DEFAULT_MAX_VALUE_LENGTH, SPANDATA from sentry_sdk.integrations.sqlalchemy import SqlalchemyIntegration from sentry_sdk.serializer import MAX_EVENT_BYTES @@ -54,7 +54,7 @@ class Address(Base): assert session.query(Person).first() == bob - capture_message("hi") + sentry_sdk.capture_message("hi") (event,) = events @@ -111,7 +111,7 @@ class Address(Base): Session = sessionmaker(bind=engine) # noqa: N806 session = Session() - with start_transaction(name="test_transaction", sampled=True): + with sentry_sdk.start_span(name="test_transaction", sampled=True): with session.begin_nested(): session.query(Person).first() @@ -135,7 +135,7 @@ class Address(Base): assert ( render_span_tree(event) == """\ -- op=null: description=null +- op="test_transaction": description=null - op="db": description="SAVEPOINT sa_savepoint_1" - op="db": description="SELECT person.id AS person_id, person.name AS person_name \\nFROM person\\n LIMIT ? OFFSET ?" 
- op="db": description="RELEASE SAVEPOINT sa_savepoint_1" @@ -185,7 +185,7 @@ class Address(Base): Session = sessionmaker(bind=engine) # noqa: N806 session = Session() - with start_transaction(name="test_transaction", sampled=True): + with sentry_sdk.start_span(name="test_transaction", sampled=True): with session.begin_nested(): session.query(Person).first() @@ -217,7 +217,7 @@ def test_long_sql_query_preserved(sentry_init, capture_events): engine = create_engine( "sqlite:///:memory:", connect_args={"check_same_thread": False} ) - with start_transaction(name="test"): + with sentry_sdk.start_span(name="test"): with engine.connect() as con: con.execute(text(" UNION ".join("SELECT {}".format(i) for i in range(100)))) @@ -246,7 +246,7 @@ def processor(event, hint): engine = create_engine( "sqlite:///:memory:", connect_args={"check_same_thread": False} ) - with start_transaction(name="test"): + with sentry_sdk.start_span(name="test"): with engine.connect() as con: for _ in range(1500): con.execute( @@ -306,7 +306,7 @@ def test_query_source_disabled(sentry_init, capture_events): events = capture_events() - with start_transaction(name="test_transaction", sampled=True): + with sentry_sdk.start_span(name="test_transaction", sampled=True): Base = declarative_base() # noqa: N806 class Person(Base): @@ -358,7 +358,7 @@ def test_query_source_enabled(sentry_init, capture_events, enable_db_query_sourc events = capture_events() - with start_transaction(name="test_transaction", sampled=True): + with sentry_sdk.start_span(name="test_transaction", sampled=True): Base = declarative_base() # noqa: N806 class Person(Base): @@ -405,7 +405,7 @@ def test_query_source(sentry_init, capture_events): ) events = capture_events() - with start_transaction(name="test_transaction", sampled=True): + with sentry_sdk.start_span(name="test_transaction", sampled=True): Base = declarative_base() # noqa: N806 class Person(Base): @@ -475,7 +475,7 @@ def test_query_source_with_module_in_search_path(sentry_init, capture_events): query_first_model_from_session, ) - with start_transaction(name="test_transaction", sampled=True): + with sentry_sdk.start_span(name="test_transaction", sampled=True): Base = declarative_base() # noqa: N806 class Person(Base): @@ -533,7 +533,7 @@ def test_no_query_source_if_duration_too_short(sentry_init, capture_events): ) events = capture_events() - with start_transaction(name="test_transaction", sampled=True): + with sentry_sdk.start_span(name="test_transaction", sampled=True): Base = declarative_base() # noqa: N806 class Person(Base): @@ -601,7 +601,7 @@ def test_query_source_if_duration_over_threshold(sentry_init, capture_events): ) events = capture_events() - with start_transaction(name="test_transaction", sampled=True): + with sentry_sdk.start_span(name="test_transaction", sampled=True): Base = declarative_base() # noqa: N806 class Person(Base): @@ -620,21 +620,15 @@ class Person(Base): bob = Person(name="Bob") session.add(bob) - class fake_record_sql_queries: # noqa: N801 - def __init__(self, *args, **kwargs): - with freeze_time(datetime(2024, 1, 1, microsecond=0)): - with record_sql_queries(*args, **kwargs) as span: - self.span = span - freezer = freeze_time(datetime(2024, 1, 1, microsecond=99999)) - freezer.start() + @contextlib.contextmanager + def fake_record_sql_queries(*args, **kwargs): # noqa: N801 + with freeze_time(datetime(2024, 1, 1, second=0)): + with record_sql_queries(*args, **kwargs) as span: + freezer = freeze_time(datetime(2024, 1, 1, second=1)) + freezer.start() + yield span - 
freezer.stop() - - def __enter__(self): - return self.span - - def __exit__(self, type, value, traceback): - pass + freezer.stop() with mock.patch( "sentry_sdk.integrations.sqlalchemy.record_sql_queries", @@ -687,7 +681,7 @@ def test_span_origin(sentry_init, capture_events): engine = create_engine( "sqlite:///:memory:", connect_args={"check_same_thread": False} ) - with start_transaction(name="foo"): + with sentry_sdk.start_span(name="foo"): with engine.connect() as con: con.execute(text("SELECT 0")) From a05086fb99455755b1359e3dd72b299ab27cbcf3 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Wed, 4 Dec 2024 09:06:02 +0100 Subject: [PATCH 141/244] Fix Redis tests for potel (#3838) * Make sure only spans are created, not transactions --- sentry_sdk/integrations/redis/_sync_common.py | 2 ++ .../redis/asyncio/test_redis_asyncio.py | 8 ++--- .../redis/cluster/test_redis_cluster.py | 12 +++---- .../test_redis_cluster_asyncio.py | 10 +++--- tests/integrations/redis/test_redis.py | 34 +++++++++---------- .../redis/test_redis_cache_module.py | 6 ++-- .../redis/test_redis_cache_module_async.py | 14 ++++---- tox.ini | 2 +- 8 files changed, 45 insertions(+), 43 deletions(-) diff --git a/sentry_sdk/integrations/redis/_sync_common.py b/sentry_sdk/integrations/redis/_sync_common.py index 63738ea7cb..c2509eea9c 100644 --- a/sentry_sdk/integrations/redis/_sync_common.py +++ b/sentry_sdk/integrations/redis/_sync_common.py @@ -88,6 +88,7 @@ def sentry_patched_execute_command(self, name, *args, **kwargs): op=cache_properties["op"], name=cache_properties["description"], origin=SPAN_ORIGIN, + only_if_parent=True, ) cache_span.__enter__() @@ -97,6 +98,7 @@ def sentry_patched_execute_command(self, name, *args, **kwargs): op=db_properties["op"], name=db_properties["description"], origin=SPAN_ORIGIN, + only_if_parent=True, ) db_span.__enter__() diff --git a/tests/integrations/redis/asyncio/test_redis_asyncio.py b/tests/integrations/redis/asyncio/test_redis_asyncio.py index 2a0b96b021..e735d478c9 100644 --- a/tests/integrations/redis/asyncio/test_redis_asyncio.py +++ b/tests/integrations/redis/asyncio/test_redis_asyncio.py @@ -1,6 +1,6 @@ import pytest -from sentry_sdk import capture_message, start_transaction +import sentry_sdk from sentry_sdk.consts import SPANDATA from sentry_sdk.integrations.redis import RedisIntegration from tests.conftest import ApproxDict @@ -16,7 +16,7 @@ async def test_async_basic(sentry_init, capture_events): connection = FakeRedis() await connection.get("foobar") - capture_message("hi") + sentry_sdk.capture_message("hi") (event,) = events (crumb,) = event["breadcrumbs"]["values"] @@ -54,7 +54,7 @@ async def test_async_redis_pipeline( events = capture_events() connection = FakeRedis() - with start_transaction(): + with sentry_sdk.start_span(): pipeline = connection.pipeline(transaction=is_transaction) pipeline.get("foo") pipeline.set("bar", 1) @@ -92,7 +92,7 @@ async def test_async_span_origin(sentry_init, capture_events): events = capture_events() connection = FakeRedis() - with start_transaction(name="custom_transaction"): + with sentry_sdk.start_span(name="custom_transaction"): # default case await connection.set("somekey", "somevalue") diff --git a/tests/integrations/redis/cluster/test_redis_cluster.py b/tests/integrations/redis/cluster/test_redis_cluster.py index 26feee1dae..43bd3e3392 100644 --- a/tests/integrations/redis/cluster/test_redis_cluster.py +++ b/tests/integrations/redis/cluster/test_redis_cluster.py @@ -1,7 +1,7 @@ import pytest -from sentry_sdk import capture_message 
+ +import sentry_sdk from sentry_sdk.consts import SPANDATA -from sentry_sdk.api import start_transaction from sentry_sdk.integrations.redis import RedisIntegration from tests.conftest import ApproxDict @@ -27,7 +27,7 @@ def test_rediscluster_breadcrumb(sentry_init, capture_events): rc = redis.RedisCluster(host="localhost", port=6379) rc.get("foobar") - capture_message("hi") + sentry_sdk.capture_message("hi") (event,) = events crumbs = event["breadcrumbs"]["values"] @@ -68,7 +68,7 @@ def test_rediscluster_basic(sentry_init, capture_events, send_default_pii, descr ) events = capture_events() - with start_transaction(): + with sentry_sdk.start_span(): rc = redis.RedisCluster(host="localhost", port=6379) rc.set("bar", 1) @@ -117,7 +117,7 @@ def test_rediscluster_pipeline( events = capture_events() rc = redis.RedisCluster(host="localhost", port=6379) - with start_transaction(): + with sentry_sdk.start_span(): pipeline = rc.pipeline() pipeline.get("foo") pipeline.set("bar", 1) @@ -152,7 +152,7 @@ def test_rediscluster_span_origin(sentry_init, capture_events): events = capture_events() rc = redis.RedisCluster(host="localhost", port=6379) - with start_transaction(name="custom_transaction"): + with sentry_sdk.start_span(name="custom_transaction"): # default case rc.set("somekey", "somevalue") diff --git a/tests/integrations/redis/cluster_asyncio/test_redis_cluster_asyncio.py b/tests/integrations/redis/cluster_asyncio/test_redis_cluster_asyncio.py index b11808fb50..85970978dd 100644 --- a/tests/integrations/redis/cluster_asyncio/test_redis_cluster_asyncio.py +++ b/tests/integrations/redis/cluster_asyncio/test_redis_cluster_asyncio.py @@ -1,6 +1,6 @@ import pytest -from sentry_sdk import capture_message, start_transaction +import sentry_sdk from sentry_sdk.consts import SPANDATA from sentry_sdk.integrations.redis import RedisIntegration from tests.conftest import ApproxDict @@ -40,7 +40,7 @@ async def test_async_breadcrumb(sentry_init, capture_events): connection = cluster.RedisCluster(host="localhost", port=6379) await connection.get("foobar") - capture_message("hi") + sentry_sdk.capture_message("hi") (event,) = events (crumb,) = event["breadcrumbs"]["values"] @@ -78,7 +78,7 @@ async def test_async_basic(sentry_init, capture_events, send_default_pii, descri events = capture_events() connection = cluster.RedisCluster(host="localhost", port=6379) - with start_transaction(): + with sentry_sdk.start_span(): await connection.set("bar", 1) (event,) = events @@ -120,7 +120,7 @@ async def test_async_redis_pipeline( events = capture_events() connection = cluster.RedisCluster(host="localhost", port=6379) - with start_transaction(): + with sentry_sdk.start_span(): pipeline = connection.pipeline() pipeline.get("foo") pipeline.set("bar", 1) @@ -156,7 +156,7 @@ async def test_async_span_origin(sentry_init, capture_events): events = capture_events() connection = cluster.RedisCluster(host="localhost", port=6379) - with start_transaction(name="custom_transaction"): + with sentry_sdk.start_span(name="custom_transaction"): # default case await connection.set("somekey", "somevalue") diff --git a/tests/integrations/redis/test_redis.py b/tests/integrations/redis/test_redis.py index 0fec23f273..4afee93c59 100644 --- a/tests/integrations/redis/test_redis.py +++ b/tests/integrations/redis/test_redis.py @@ -3,7 +3,7 @@ import pytest from fakeredis import FakeStrictRedis -from sentry_sdk import capture_message, start_transaction +import sentry_sdk from sentry_sdk.consts import SPANDATA from sentry_sdk.integrations.redis 
import RedisIntegration @@ -23,7 +23,7 @@ def test_basic(sentry_init, capture_events): connection = FakeStrictRedis() connection.get("foobar") - capture_message("hi") + sentry_sdk.capture_message("hi") (event,) = events (crumb,) = event["breadcrumbs"]["values"] @@ -60,7 +60,7 @@ def test_redis_pipeline( events = capture_events() connection = FakeStrictRedis() - with start_transaction(): + with sentry_sdk.start_span(): pipeline = connection.pipeline(transaction=is_transaction) pipeline.get("foo") pipeline.set("bar", 1) @@ -94,7 +94,7 @@ def test_sensitive_data(sentry_init, capture_events, render_span_tree): events = capture_events() connection = FakeStrictRedis() - with start_transaction(): + with sentry_sdk.start_span(): connection.get( "this is super secret" ) # because fakeredis does not support AUTH we use GET instead @@ -103,7 +103,7 @@ def test_sensitive_data(sentry_init, capture_events, render_span_tree): assert ( render_span_tree(event) == """\ -- op="": description=null +- op="": description=null - op="db.redis": description="GET [Filtered]"\ """ ) @@ -117,7 +117,7 @@ def test_pii_data_redacted(sentry_init, capture_events, render_span_tree): events = capture_events() connection = FakeStrictRedis() - with start_transaction(): + with sentry_sdk.start_span(): connection.set("somekey1", "my secret string1") connection.set("somekey2", "my secret string2") connection.get("somekey2") @@ -127,7 +127,7 @@ def test_pii_data_redacted(sentry_init, capture_events, render_span_tree): assert ( render_span_tree(event) == """\ -- op="": description=null +- op="": description=null - op="db.redis": description="SET 'somekey1' [Filtered]" - op="db.redis": description="SET 'somekey2' [Filtered]" - op="db.redis": description="GET 'somekey2'" @@ -145,7 +145,7 @@ def test_pii_data_sent(sentry_init, capture_events, render_span_tree): events = capture_events() connection = FakeStrictRedis() - with start_transaction(): + with sentry_sdk.start_span(): connection.set("somekey1", "my secret string1") connection.set("somekey2", "my secret string2") connection.get("somekey2") @@ -155,7 +155,7 @@ def test_pii_data_sent(sentry_init, capture_events, render_span_tree): assert ( render_span_tree(event) == """\ -- op="": description=null +- op="": description=null - op="db.redis": description="SET 'somekey1' 'my secret string1'" - op="db.redis": description="SET 'somekey2' 'my secret string2'" - op="db.redis": description="GET 'somekey2'" @@ -173,7 +173,7 @@ def test_data_truncation(sentry_init, capture_events, render_span_tree): events = capture_events() connection = FakeStrictRedis() - with start_transaction(): + with sentry_sdk.start_span(): long_string = "a" * 100000 connection.set("somekey1", long_string) short_string = "b" * 10 @@ -183,7 +183,7 @@ def test_data_truncation(sentry_init, capture_events, render_span_tree): assert ( render_span_tree(event) == f"""\ -- op="": description=null +- op="": description=null - op="db.redis": description="SET 'somekey1' '{long_string[: 1024 - len("...") - len("SET 'somekey1' '")]}..." 
- op="db.redis": description="SET 'somekey2' 'bbbbbbbbbb'"\ """ # noqa: E221 @@ -199,7 +199,7 @@ def test_data_truncation_custom(sentry_init, capture_events, render_span_tree): events = capture_events() connection = FakeStrictRedis() - with start_transaction(): + with sentry_sdk.start_span(): long_string = "a" * 100000 connection.set("somekey1", long_string) short_string = "b" * 10 @@ -209,7 +209,7 @@ def test_data_truncation_custom(sentry_init, capture_events, render_span_tree): assert ( render_span_tree(event) == f"""\ -- op="": description=null +- op="": description=null - op="db.redis": description="SET 'somekey1' '{long_string[: 30 - len("...") - len("SET 'somekey1' '")]}..." - op="db.redis": description="SET 'somekey2' '{short_string}'"\ """ # noqa: E221 @@ -230,7 +230,7 @@ def test_breadcrumbs(sentry_init, capture_events): short_string = "b" * 10 connection.set("somekey2", short_string) - capture_message("hi") + sentry_sdk.capture_message("hi") (event,) = events crumbs = event["breadcrumbs"]["values"] @@ -268,7 +268,7 @@ def test_db_connection_attributes_client(sentry_init, capture_events): ) events = capture_events() - with start_transaction(): + with sentry_sdk.start_span(): connection = FakeStrictRedis(connection_pool=MOCK_CONNECTION_POOL) connection.get("foobar") @@ -290,7 +290,7 @@ def test_db_connection_attributes_pipeline(sentry_init, capture_events): ) events = capture_events() - with start_transaction(): + with sentry_sdk.start_span(): connection = FakeStrictRedis(connection_pool=MOCK_CONNECTION_POOL) pipeline = connection.pipeline(transaction=False) pipeline.get("foo") @@ -317,7 +317,7 @@ def test_span_origin(sentry_init, capture_events): events = capture_events() connection = FakeStrictRedis() - with start_transaction(name="custom_transaction"): + with sentry_sdk.start_span(name="custom_transaction"): # default case connection.set("somekey", "somevalue") diff --git a/tests/integrations/redis/test_redis_cache_module.py b/tests/integrations/redis/test_redis_cache_module.py index 68f915c2e5..e02b1ec31a 100644 --- a/tests/integrations/redis/test_redis_cache_module.py +++ b/tests/integrations/redis/test_redis_cache_module.py @@ -31,7 +31,7 @@ def test_no_cache_basic(sentry_init, capture_events, render_span_tree): assert ( render_span_tree(event) == """\ -- op="": description=null +- op="": description=null - op="db.redis": description="GET 'mycachekey'"\ """ ) @@ -61,7 +61,7 @@ def test_cache_basic(sentry_init, capture_events, render_span_tree): assert ( render_span_tree(event) == """\ -- op="": description=null +- op="": description=null - op="db.redis": description="HGET 'mycachekey' [Filtered]" - op="cache.get": description="mycachekey" - op="db.redis": description="GET 'mycachekey'" @@ -97,7 +97,7 @@ def test_cache_keys(sentry_init, capture_events, render_span_tree): assert ( render_span_tree(event) == """\ -- op="": description=null +- op="": description=null - op="db.redis": description="GET 'somethingelse'" - op="cache.get": description="blub" - op="db.redis": description="GET 'blub'" diff --git a/tests/integrations/redis/test_redis_cache_module_async.py b/tests/integrations/redis/test_redis_cache_module_async.py index a6ea06a973..d4ce4936bb 100644 --- a/tests/integrations/redis/test_redis_cache_module_async.py +++ b/tests/integrations/redis/test_redis_cache_module_async.py @@ -31,14 +31,14 @@ async def test_no_cache_basic(sentry_init, capture_events, render_span_tree): events = capture_events() connection = FakeRedisAsync() - with sentry_sdk.start_transaction(): + with 
sentry_sdk.start_span(): await connection.get("myasynccachekey") (event,) = events assert ( render_span_tree(event) == """\ -- op="": description=null +- op="": description=null - op="db.redis": description="GET 'myasynccachekey'"\ """ ) @@ -57,14 +57,14 @@ async def test_cache_basic(sentry_init, capture_events, render_span_tree): events = capture_events() connection = FakeRedisAsync() - with sentry_sdk.start_transaction(): + with sentry_sdk.start_span(): await connection.get("myasynccachekey") (event,) = events assert ( render_span_tree(event) == """\ -- op="": description=null +- op="": description=null - op="cache.get": description="myasynccachekey" - op="db.redis": description="GET 'myasynccachekey'"\ """ @@ -84,7 +84,7 @@ async def test_cache_keys(sentry_init, capture_events, render_span_tree): events = capture_events() connection = FakeRedisAsync() - with sentry_sdk.start_transaction(): + with sentry_sdk.start_span(): await connection.get("asomethingelse") await connection.get("ablub") await connection.get("ablubkeything") @@ -94,7 +94,7 @@ async def test_cache_keys(sentry_init, capture_events, render_span_tree): assert ( render_span_tree(event) == """\ -- op="": description=null +- op="": description=null - op="db.redis": description="GET 'asomethingelse'" - op="cache.get": description="ablub" - op="db.redis": description="GET 'ablub'" @@ -118,7 +118,7 @@ async def test_cache_data(sentry_init, capture_events): events = capture_events() connection = FakeRedisAsync(host="mycacheserver.io", port=6378) - with sentry_sdk.start_transaction(): + with sentry_sdk.start_span(): await connection.get("myasynccachekey") await connection.set("myasynccachekey", "事实胜于雄辩") await connection.get("myasynccachekey") diff --git a/tox.ini b/tox.ini index 60036adf98..ad1591f91e 100644 --- a/tox.ini +++ b/tox.ini @@ -403,7 +403,7 @@ deps = # Django django: psycopg2-binary - django-v{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: djangorestframework>=3.0.0,<4.0.0 + django-v{1.11,2.0,2.2,3.0,3.1,3.2}: djangorestframework>=3.0.0,<4.0.0 django-v{2.0,2.2,3.0,3.2,4.0,4.1,4.2,5.0,5.1}: channels[daphne] django-v{2.2,3.0}: six django-v{1.11,2.0,2.2,3.0,3.2}: Werkzeug<2.1.0 From cf598e8c6185ea0be22c795cafabaf5deb07c191 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Wed, 4 Dec 2024 19:34:40 +0100 Subject: [PATCH 142/244] Make test work in potel (#3848) --- tests/integrations/cohere/test_cohere.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/integrations/cohere/test_cohere.py b/tests/integrations/cohere/test_cohere.py index 20371029d5..672d71b6b3 100644 --- a/tests/integrations/cohere/test_cohere.py +++ b/tests/integrations/cohere/test_cohere.py @@ -152,7 +152,7 @@ def test_bad_chat(sentry_init, capture_events): with pytest.raises(httpx.HTTPError): client.chat(model="some-model", message="hello") - (event, _) = events + (event,) = events assert event["level"] == "error" From 1dbcdd4c9bb33962e8b5163a98c745c025ed9bfe Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Wed, 4 Dec 2024 19:36:49 +0100 Subject: [PATCH 143/244] Fixed tests for boto3 (#3844) --- sentry_sdk/integrations/boto3.py | 13 ++++++++----- tests/integrations/boto3/test_s3.py | 12 ++++++------ 2 files changed, 14 insertions(+), 11 deletions(-) diff --git a/sentry_sdk/integrations/boto3.py b/sentry_sdk/integrations/boto3.py index dfea7459c3..04eac8c84f 100644 --- a/sentry_sdk/integrations/boto3.py +++ b/sentry_sdk/integrations/boto3.py @@ -116,20 +116,19 @@ def _sentry_after_call(context, parsed, **kwargs): data=span_data, ) - 
span.__exit__(None, None, None) - body = parsed.get("Body") if not isinstance(body, StreamingBody): + span.__exit__(None, None, None) return - streaming_span = span.start_child( + streaming_span = sentry_sdk.start_span( op=OP.HTTP_CLIENT_STREAM, - name=span.description, + name=span.name, origin=Boto3Integration.origin, + only_if_parent=True, ) orig_read = body.read - orig_close = body.close def sentry_streaming_body_read(*args, **kwargs): # type: (*Any, **Any) -> bytes @@ -144,6 +143,8 @@ def sentry_streaming_body_read(*args, **kwargs): body.read = sentry_streaming_body_read + orig_close = body.close + def sentry_streaming_body_close(*args, **kwargs): # type: (*Any, **Any) -> None streaming_span.finish() @@ -151,6 +152,8 @@ def sentry_streaming_body_close(*args, **kwargs): body.close = sentry_streaming_body_close + span.__exit__(None, None, None) + def _sentry_after_call_error(context, exception, **kwargs): # type: (Dict[str, Any], Type[BaseException], **Any) -> None diff --git a/tests/integrations/boto3/test_s3.py b/tests/integrations/boto3/test_s3.py index 668e8349b6..71dc5ccc07 100644 --- a/tests/integrations/boto3/test_s3.py +++ b/tests/integrations/boto3/test_s3.py @@ -21,7 +21,7 @@ def test_basic(sentry_init, capture_events): events = capture_events() s3 = session.resource("s3") - with sentry_sdk.start_transaction() as transaction, MockResponse( + with sentry_sdk.start_span() as transaction, MockResponse( s3.meta.client, 200, {}, read_fixture("s3_list.xml") ): bucket = s3.Bucket("bucket") @@ -45,7 +45,7 @@ def test_breadcrumb(sentry_init, capture_events): try: s3 = session.resource("s3") - with sentry_sdk.start_transaction(), MockResponse( + with sentry_sdk.start_span(), MockResponse( s3.meta.client, 200, {}, read_fixture("s3_list.xml") ): bucket = s3.Bucket("bucket") @@ -75,7 +75,7 @@ def test_streaming(sentry_init, capture_events): events = capture_events() s3 = session.resource("s3") - with sentry_sdk.start_transaction() as transaction, MockResponse( + with sentry_sdk.start_span() as transaction, MockResponse( s3.meta.client, 200, {}, b"hello" ): obj = s3.Bucket("bucket").Object("foo.pdf") @@ -113,7 +113,7 @@ def test_streaming_close(sentry_init, capture_events): events = capture_events() s3 = session.resource("s3") - with sentry_sdk.start_transaction() as transaction, MockResponse( + with sentry_sdk.start_span() as transaction, MockResponse( s3.meta.client, 200, {}, b"hello" ): obj = s3.Bucket("bucket").Object("foo.pdf") @@ -142,7 +142,7 @@ def test_omit_url_data_if_parsing_fails(sentry_init, capture_events): "sentry_sdk.integrations.boto3.parse_url", side_effect=ValueError, ): - with sentry_sdk.start_transaction() as transaction, MockResponse( + with sentry_sdk.start_span() as transaction, MockResponse( s3.meta.client, 200, {}, read_fixture("s3_list.xml") ): bucket = s3.Bucket("bucket") @@ -170,7 +170,7 @@ def test_span_origin(sentry_init, capture_events): events = capture_events() s3 = session.resource("s3") - with sentry_sdk.start_transaction(), MockResponse( + with sentry_sdk.start_span(), MockResponse( s3.meta.client, 200, {}, read_fixture("s3_list.xml") ): bucket = s3.Bucket("bucket") From c4f2a2a2aa98c07a77253a69dbb05a5e95832752 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Wed, 4 Dec 2024 19:37:25 +0100 Subject: [PATCH 144/244] Fix Huey tests to work with POTel (#3843) --- sentry_sdk/integrations/huey.py | 2 +- sentry_sdk/tracing.py | 7 ++++++- tests/integrations/huey/test_huey.py | 8 ++++---- 3 files changed, 11 insertions(+), 6 deletions(-) diff --git 
a/sentry_sdk/integrations/huey.py b/sentry_sdk/integrations/huey.py index f12c63705b..4dcff8513f 100644 --- a/sentry_sdk/integrations/huey.py +++ b/sentry_sdk/integrations/huey.py @@ -162,7 +162,7 @@ def _sentry_execute(self, task, timestamp=None): sentry_headers = task.kwargs.pop("sentry_headers", {}) with sentry_sdk.continue_trace(sentry_headers): - with sentry_sdk.start_transaction( + with sentry_sdk.start_span( name=task.name, op=OP.QUEUE_TASK_HUEY, source=TRANSACTION_SOURCE_TASK, diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 6bd42983f2..6728b9b4c9 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -1313,7 +1313,12 @@ def __exit__(self, ty, value, tb): if value is not None: self.set_status(SPANSTATUS.INTERNAL_ERROR) else: - self.set_status(SPANSTATUS.OK) + status_unset = ( + hasattr(self._otel_span, "status") + and self._otel_span.status.status_code == StatusCode.UNSET + ) + if status_unset: + self.set_status(SPANSTATUS.OK) self.finish() context.detach(self._ctx_token) diff --git a/tests/integrations/huey/test_huey.py b/tests/integrations/huey/test_huey.py index 143a369348..bdd5c2ca10 100644 --- a/tests/integrations/huey/test_huey.py +++ b/tests/integrations/huey/test_huey.py @@ -1,7 +1,7 @@ import pytest from decimal import DivisionByZero -from sentry_sdk import start_transaction +import sentry_sdk from sentry_sdk.integrations.huey import HueyIntegration from sentry_sdk.utils import parse_version @@ -160,7 +160,7 @@ def dummy_task(): events = capture_events() - with start_transaction() as transaction: + with sentry_sdk.start_span() as transaction: dummy_task() (event,) = events @@ -182,7 +182,7 @@ def test_huey_propagate_trace(init_huey, capture_events): def propagated_trace_task(): pass - with start_transaction() as outer_transaction: + with sentry_sdk.start_span() as outer_transaction: execute_huey_task(huey, propagated_trace_task) assert ( @@ -200,7 +200,7 @@ def dummy_task(): events = capture_events() - with start_transaction(): + with sentry_sdk.start_span(): dummy_task() (event,) = events From abd4baa946b0a9c79b1e21b408c9419d4568628d Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Wed, 4 Dec 2024 20:15:29 +0100 Subject: [PATCH 145/244] Fix status related tests again --- tests/integrations/opentelemetry/test_propagator.py | 13 +++++-------- tests/test_api.py | 4 ++-- tests/tracing/test_integration_tests.py | 2 +- 3 files changed, 8 insertions(+), 11 deletions(-) diff --git a/tests/integrations/opentelemetry/test_propagator.py b/tests/integrations/opentelemetry/test_propagator.py index b318dccdf7..ef952ea50a 100644 --- a/tests/integrations/opentelemetry/test_propagator.py +++ b/tests/integrations/opentelemetry/test_propagator.py @@ -1,6 +1,5 @@ import pytest -from unittest import mock from unittest.mock import MagicMock from opentelemetry.trace.propagation import get_current_span @@ -12,8 +11,6 @@ SENTRY_TRACE_KEY, ) from sentry_sdk.integrations.opentelemetry.propagator import SentryPropagator -from sentry_sdk.integrations.opentelemetry.span_processor import SentrySpanProcessor -from sentry_sdk.tracing_utils import Baggage @pytest.mark.forked @@ -23,7 +20,7 @@ def test_extract_no_context_no_sentry_trace_header(): Extract should return empty context. 
""" carrier = None - context = None + context = {} getter = MagicMock() getter.get.return_value = None @@ -144,8 +141,8 @@ def test_inject_continue_trace(sentry_init, SortedBaggage): with sentry_sdk.continue_trace(incoming_headers): with sentry_sdk.start_span(name="foo") as span: SentryPropagator().inject(carrier, setter=setter) - assert(carrier["sentry-trace"]) == f"{trace_id}-{span.span_id}-1" - assert(carrier["baggage"]) == SortedBaggage(baggage) + assert (carrier["sentry-trace"]) == f"{trace_id}-{span.span_id}-1" + assert (carrier["baggage"]) == SortedBaggage(baggage) def test_inject_head_sdk(sentry_init, SortedBaggage): @@ -156,7 +153,7 @@ def test_inject_head_sdk(sentry_init, SortedBaggage): with sentry_sdk.start_span(name="foo") as span: SentryPropagator().inject(carrier, setter=setter) - assert(carrier["sentry-trace"]) == f"{span.trace_id}-{span.span_id}-1" - assert(carrier["baggage"]) == SortedBaggage( + assert (carrier["sentry-trace"]) == f"{span.trace_id}-{span.span_id}-1" + assert (carrier["baggage"]) == SortedBaggage( f"sentry-transaction=foo,sentry-release=release,sentry-environment=production,sentry-trace_id={span.trace_id},sentry-sample_rate=1.0,sentry-sampled=true" ) diff --git a/tests/test_api.py b/tests/test_api.py index 1be69d4a84..6c81c93d21 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -91,8 +91,8 @@ def test_baggage_with_tracing_disabled(sentry_init, SortedBaggage): @pytest.mark.forked def test_baggage_with_tracing_enabled(sentry_init, SortedBaggage): sentry_init(traces_sample_rate=1.0, release="1.0.0", environment="dev") - with start_span() as span: - expected_baggage = "sentry-trace_id={},sentry-environment=dev,sentry-release=1.0.0,sentry-sample_rate=1.0,sentry-sampled={}".format( + with start_span(name="foo") as span: + expected_baggage = "sentry-transaction=foo,sentry-trace_id={},sentry-environment=dev,sentry-release=1.0.0,sentry-sample_rate=1.0,sentry-sampled={}".format( span.trace_id, "true" if span.sampled else "false" ) assert get_baggage() == SortedBaggage(expected_baggage) diff --git a/tests/tracing/test_integration_tests.py b/tests/tracing/test_integration_tests.py index 3a4bef77fb..d6f306b0fd 100644 --- a/tests/tracing/test_integration_tests.py +++ b/tests/tracing/test_integration_tests.py @@ -39,7 +39,7 @@ def test_basic(sentry_init, capture_events, sample_rate): assert span1["status"] == "internal_error" assert span1["op"] == "foo" assert span1["description"] == "foodesc" - assert "status" not in span2.get("tags", {}) + assert span2["tags"]["status"] == "ok" assert span2["op"] == "bar" assert span2["description"] == "bardesc" assert parent_span["transaction"] == "hi" From 65fbd508f45371d0b5d6a6d888819c69b3777075 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Thu, 5 Dec 2024 14:22:04 +0100 Subject: [PATCH 146/244] Move scope context init outside integration (#3850) * Move scope context init outside integration * Fix ThreadingIntegration by carrying forward span reference in (#3851) `use_scope` Since the otel context span reference is the source of truth for the current active span, we need to explicitly pass the span reference on the scope through when we use `use_scope` since we are changing context variables in the other thread. 
Also, * fixes some typing in the original scope * adds more types to the `contextvars_context` manager --- MIGRATION_GUIDE.md | 1 + sentry_sdk/_init_implementation.py | 2 ++ .../opentelemetry/contextvars_context.py | 32 +++++++++++++++++-- .../integrations/opentelemetry/integration.py | 13 -------- .../integrations/opentelemetry/scope.py | 12 ++++++- sentry_sdk/integrations/threading.py | 20 +++--------- sentry_sdk/scope.py | 7 ++-- sentry_sdk/tracing.py | 4 +-- tests/conftest.py | 8 +++-- .../integrations/threading/test_threading.py | 20 ++++++------ tests/test_scope.py | 6 ++-- 11 files changed, 72 insertions(+), 53 deletions(-) diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index 1c0fa76fb0..6f0aeb4510 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -138,6 +138,7 @@ Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of wh - `span.containing_transaction` has been removed. Use `span.root_span` instead. - `continue_from_headers`, `continue_from_environ` and `from_traceparent` have been removed, please use top-level API `sentry_sdk.continue_trace` instead. - `PropagationContext` constructor no longer takes a `dynamic_sampling_context` but takes a `baggage` object instead. +- `ThreadingIntegration` no longer takes the `propagate_hub` argument. ### Deprecated diff --git a/sentry_sdk/_init_implementation.py b/sentry_sdk/_init_implementation.py index dc235af243..74bbd9a20f 100644 --- a/sentry_sdk/_init_implementation.py +++ b/sentry_sdk/_init_implementation.py @@ -1,6 +1,7 @@ from typing import TYPE_CHECKING import sentry_sdk +from sentry_sdk.integrations.opentelemetry.scope import setup_scope_context_management if TYPE_CHECKING: from typing import Any, Optional @@ -24,6 +25,7 @@ def _init(*args, **kwargs): """ client = sentry_sdk.Client(*args, **kwargs) sentry_sdk.get_global_scope().set_client(client) + setup_scope_context_management() _check_python_deprecations() diff --git a/sentry_sdk/integrations/opentelemetry/contextvars_context.py b/sentry_sdk/integrations/opentelemetry/contextvars_context.py index b66b10d18a..8025f26ba8 100644 --- a/sentry_sdk/integrations/opentelemetry/contextvars_context.py +++ b/sentry_sdk/integrations/opentelemetry/contextvars_context.py @@ -1,3 +1,6 @@ +from typing import cast, TYPE_CHECKING + +from opentelemetry.trace import set_span_in_context from opentelemetry.context import Context, get_value, set_value from opentelemetry.context.contextvars_context import ContextVarsRuntimeContext @@ -9,25 +12,50 @@ SENTRY_USE_ISOLATION_SCOPE_KEY, ) +if TYPE_CHECKING: + from typing import Optional + from sentry_sdk.integrations.opentelemetry.scope import PotelScope + class SentryContextVarsRuntimeContext(ContextVarsRuntimeContext): def attach(self, context): # type: (Context) -> object scopes = get_value(SENTRY_SCOPES_KEY, context) + should_fork_isolation_scope = context.pop( SENTRY_FORK_ISOLATION_SCOPE_KEY, False ) + should_fork_isolation_scope = cast("bool", should_fork_isolation_scope) + should_use_isolation_scope = context.pop(SENTRY_USE_ISOLATION_SCOPE_KEY, None) + should_use_isolation_scope = cast( + "Optional[PotelScope]", should_use_isolation_scope + ) + should_use_current_scope = context.pop(SENTRY_USE_CURRENT_SCOPE_KEY, None) + should_use_current_scope = cast( + "Optional[PotelScope]", should_use_current_scope + ) - if scopes and isinstance(scopes, tuple): + if scopes: + scopes = cast("tuple[PotelScope, PotelScope]", scopes) (current_scope, isolation_scope) = scopes else: current_scope = sentry_sdk.get_current_scope() 
isolation_scope = sentry_sdk.get_isolation_scope() + new_context = context + if should_use_current_scope: new_scope = should_use_current_scope + + # the main case where we use use_scope is for + # scope propagation in the ThreadingIntegration + # so we need to carry forward the span reference explicitly too + span = should_use_current_scope.span + if span: + new_context = set_span_in_context(span._otel_span, new_context) + else: new_scope = current_scope.fork() @@ -40,5 +68,5 @@ def attach(self, context): new_scopes = (new_scope, new_isolation_scope) - new_context = set_value(SENTRY_SCOPES_KEY, new_scopes, context) + new_context = set_value(SENTRY_SCOPES_KEY, new_scopes, new_context) return super().attach(new_context) diff --git a/sentry_sdk/integrations/opentelemetry/integration.py b/sentry_sdk/integrations/opentelemetry/integration.py index 013575dfa7..551ef48891 100644 --- a/sentry_sdk/integrations/opentelemetry/integration.py +++ b/sentry_sdk/integrations/opentelemetry/integration.py @@ -5,14 +5,10 @@ """ from sentry_sdk.integrations import DidNotEnable, Integration -from sentry_sdk.integrations.opentelemetry.scope import setup_initial_scopes from sentry_sdk.integrations.opentelemetry.propagator import SentryPropagator from sentry_sdk.integrations.opentelemetry.span_processor import ( SentrySpanProcessor, ) -from sentry_sdk.integrations.opentelemetry.contextvars_context import ( - SentryContextVarsRuntimeContext, -) from sentry_sdk.integrations.opentelemetry.sampler import SentrySampler from sentry_sdk.utils import logger @@ -45,7 +41,6 @@ def setup_once(): "Use at your own risk." ) - _setup_scope_context_management() _setup_sentry_tracing() _patch_readable_span() # _setup_instrumentors() @@ -70,14 +65,6 @@ def sentry_patched_readable_span(self): Span._readable_span = sentry_patched_readable_span -def _setup_scope_context_management(): - # type: () -> None - import opentelemetry.context - - opentelemetry.context._RUNTIME_CONTEXT = SentryContextVarsRuntimeContext() - setup_initial_scopes() - - def _setup_sentry_tracing(): # type: () -> None provider = TracerProvider(sampler=SentrySampler()) diff --git a/sentry_sdk/integrations/opentelemetry/scope.py b/sentry_sdk/integrations/opentelemetry/scope.py index 2e12cb53d4..89da1af68c 100644 --- a/sentry_sdk/integrations/opentelemetry/scope.py +++ b/sentry_sdk/integrations/opentelemetry/scope.py @@ -2,7 +2,6 @@ from contextlib import contextmanager from opentelemetry.context import ( - Context, get_value, set_value, attach, @@ -24,6 +23,9 @@ SENTRY_USE_ISOLATION_SCOPE_KEY, TRACESTATE_SAMPLED_KEY, ) +from sentry_sdk.integrations.opentelemetry.contextvars_context import ( + SentryContextVarsRuntimeContext, +) from sentry_sdk.integrations.opentelemetry.utils import trace_state_from_baggage from sentry_sdk.scope import Scope, ScopeType from sentry_sdk.tracing import POTelSpan @@ -152,6 +154,14 @@ def setup_initial_scopes(): attach(set_value(SENTRY_SCOPES_KEY, scopes)) +def setup_scope_context_management(): + # type: () -> None + import opentelemetry.context + + opentelemetry.context._RUNTIME_CONTEXT = SentryContextVarsRuntimeContext() + setup_initial_scopes() + + @contextmanager def isolation_scope(): # type: () -> Generator[Scope, None, None] diff --git a/sentry_sdk/integrations/threading.py b/sentry_sdk/integrations/threading.py index 99bfc66611..33cdd0d0be 100644 --- a/sentry_sdk/integrations/threading.py +++ b/sentry_sdk/integrations/threading.py @@ -7,7 +7,6 @@ from sentry_sdk.utils import ( event_from_exception, capture_internal_exceptions, - 
logger, reraise, ) @@ -27,22 +26,10 @@ class ThreadingIntegration(Integration): identifier = "threading" - def __init__(self, propagate_hub=None, propagate_scope=True): - # type: (Optional[bool], bool) -> None - if propagate_hub is not None: - logger.warning( - "Deprecated: propagate_hub is deprecated. This will be removed in the future." - ) - - # Note: propagate_hub did not have any effect on propagation of scope data - # scope data was always propagated no matter what the value of propagate_hub was - # This is why the default for propagate_scope is True - + def __init__(self, propagate_scope=True): + # type: (bool) -> None self.propagate_scope = propagate_scope - if propagate_hub is not None: - self.propagate_scope = propagate_hub - @staticmethod def setup_once(): # type: () -> None @@ -99,7 +86,8 @@ def _run_old_run_func(): with sentry_sdk.use_scope(current_scope_to_use): return _run_old_run_func() else: - return _run_old_run_func() + with sentry_sdk.isolation_scope(): + return _run_old_run_func() return run # type: ignore diff --git a/sentry_sdk/scope.py b/sentry_sdk/scope.py index 54e6fc8928..7083c3709c 100644 --- a/sentry_sdk/scope.py +++ b/sentry_sdk/scope.py @@ -26,6 +26,7 @@ SENTRY_TRACE_HEADER_NAME, NoOpSpan, Span, + POTelSpan, Transaction, ) from sentry_sdk.utils import ( @@ -669,7 +670,7 @@ def clear(self): self.clear_breadcrumbs() self._should_capture = True # type: bool - self._span = None # type: Optional[Span] + self._span = None # type: Optional[POTelSpan] self._session = None # type: Optional[Session] self._force_auto_session_tracking = None # type: Optional[bool] @@ -777,13 +778,13 @@ def set_user(self, value): @property def span(self): - # type: () -> Optional[Span] + # type: () -> Optional[POTelSpan] """Get current tracing span.""" return self._span @span.setter def span(self, span): - # type: (Optional[Span]) -> None + # type: (Optional[POTelSpan]) -> None """Set current tracing span.""" self._span = span diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 6728b9b4c9..7686dcf052 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -1280,11 +1280,11 @@ def __eq__(self, other): def __repr__(self): # type: () -> str return ( - "<%s(op=%r, description:%r, trace_id=%r, span_id=%r, parent_span_id=%r, sampled=%r, origin=%r)>" + "<%s(op=%r, name:%r, trace_id=%r, span_id=%r, parent_span_id=%r, sampled=%r, origin=%r)>" % ( self.__class__.__name__, self.op, - self.description, + self.name, self.trace_id, self.span_id, self.parent_span_id, diff --git a/tests/conftest.py b/tests/conftest.py index 18b3ec3576..a32ebd5eb1 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -62,7 +62,10 @@ def benchmark(): from sentry_sdk import scope -import sentry_sdk.integrations.opentelemetry.scope as potel_scope +from sentry_sdk.integrations.opentelemetry.scope import ( + setup_scope_context_management, + setup_initial_scopes, +) @pytest.fixture(autouse=True) @@ -74,7 +77,7 @@ def clean_scopes(): scope._isolation_scope.set(None) scope._current_scope.set(None) - potel_scope.setup_initial_scopes() + setup_initial_scopes() @pytest.fixture(autouse=True) @@ -187,6 +190,7 @@ def inner(*a, **kw): kw.setdefault("transport", TestTransport()) client = sentry_sdk.Client(*a, **kw) sentry_sdk.get_global_scope().set_client(client) + setup_scope_context_management() if request.node.get_closest_marker("forked"): # Do not run isolation if the test is already running in diff --git a/tests/integrations/threading/test_threading.py b/tests/integrations/threading/test_threading.py 
index 0d14fae352..75b3b7eea1 100644 --- a/tests/integrations/threading/test_threading.py +++ b/tests/integrations/threading/test_threading.py @@ -35,11 +35,11 @@ def crash(): assert not events -@pytest.mark.parametrize("propagate_hub", (True, False)) -def test_propagates_hub(sentry_init, capture_events, propagate_hub): +@pytest.mark.parametrize("propagate_scope", (True, False)) +def test_propagates_scope(sentry_init, capture_events, propagate_scope): sentry_init( default_integrations=False, - integrations=[ThreadingIntegration(propagate_hub=propagate_hub)], + integrations=[ThreadingIntegration(propagate_scope=propagate_scope)], ) events = capture_events() @@ -65,25 +65,25 @@ def stage2(): assert exception["mechanism"]["type"] == "threading" assert not exception["mechanism"]["handled"] - if propagate_hub: + if propagate_scope: assert event["tags"]["stage1"] == "true" else: assert "stage1" not in event.get("tags", {}) -@pytest.mark.parametrize("propagate_hub", (True, False)) -def test_propagates_threadpool_hub(sentry_init, capture_events, propagate_hub): +@pytest.mark.parametrize("propagate_scope", (True, False)) +def test_propagates_threadpool_scope(sentry_init, capture_events, propagate_scope): sentry_init( traces_sample_rate=1.0, - integrations=[ThreadingIntegration(propagate_hub=propagate_hub)], + integrations=[ThreadingIntegration(propagate_scope=propagate_scope)], ) events = capture_events() def double(number): - with sentry_sdk.start_span(op="task", name=str(number)): + with sentry_sdk.start_span(op="task", name=str(number), only_if_parent=True): return number * 2 - with sentry_sdk.start_transaction(name="test_handles_threadpool"): + with sentry_sdk.start_span(name="test_handles_threadpool"): with futures.ThreadPoolExecutor(max_workers=1) as executor: tasks = [executor.submit(double, number) for number in [1, 2, 3, 4]] for future in futures.as_completed(tasks): @@ -91,7 +91,7 @@ def double(number): sentry_sdk.flush() - if propagate_hub: + if propagate_scope: assert len(events) == 1 (event,) = events assert event["spans"][0]["trace_id"] == event["spans"][1]["trace_id"] diff --git a/tests/test_scope.py b/tests/test_scope.py index 48b8782190..308c7bd6c5 100644 --- a/tests/test_scope.py +++ b/tests/test_scope.py @@ -15,13 +15,11 @@ ScopeType, should_send_default_pii, ) -from sentry_sdk.integrations.opentelemetry.integration import ( - _setup_scope_context_management, -) from sentry_sdk.integrations.opentelemetry.scope import ( PotelScope as Scope, use_scope, use_isolation_scope, + setup_scope_context_management, ) @@ -31,7 +29,7 @@ @pytest.fixture(autouse=True) def setup_otel_scope_management(): - _setup_scope_context_management() + setup_scope_context_management() def test_copying(): From 7c70b9c1455917c3e35416e53712ebf25b7d6236 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Thu, 5 Dec 2024 17:35:07 +0100 Subject: [PATCH 147/244] Fix leftover scope test --- tests/test_scope.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/tests/test_scope.py b/tests/test_scope.py index 858295536c..e7194e6caf 100644 --- a/tests/test_scope.py +++ b/tests/test_scope.py @@ -21,6 +21,7 @@ use_isolation_scope, setup_scope_context_management, ) +from tests.conftest import ApproxDict @pytest.fixture(autouse=True) @@ -800,8 +801,8 @@ def test_nested_scopes_with_tags(sentry_init, capture_envelopes): transaction = envelope.items[0].get_transaction_event() assert transaction["tags"] == {"isolation_scope1": 1, "current_scope2": 1, "trx": 1} - assert transaction["spans"][0]["tags"] == {"a": 1} 
- assert transaction["spans"][1]["tags"] == {"b": 1} + assert transaction["spans"][0]["tags"] == ApproxDict({"a": 1}) + assert transaction["spans"][1]["tags"] == ApproxDict({"b": 1}) def test_should_send_default_pii_true(sentry_init): From bcadb616b09793e852f96b987956dff7bbb0e122 Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Thu, 5 Dec 2024 17:36:34 +0100 Subject: [PATCH 148/244] Sampling context improvements (#3847) --- MIGRATION_GUIDE.md | 199 +++++++++++---------- sentry_sdk/integrations/_wsgi_common.py | 16 +- sentry_sdk/integrations/aiohttp.py | 7 +- sentry_sdk/integrations/asgi.py | 11 +- sentry_sdk/integrations/aws_lambda.py | 15 +- sentry_sdk/integrations/celery/__init__.py | 13 +- sentry_sdk/integrations/gcp.py | 10 +- sentry_sdk/integrations/rq.py | 16 +- sentry_sdk/integrations/tornado.py | 9 +- sentry_sdk/integrations/wsgi.py | 11 +- tests/integrations/aiohttp/test_aiohttp.py | 5 +- tests/integrations/asgi/test_asgi.py | 3 +- tests/integrations/aws_lambda/test_aws.py | 3 +- tests/integrations/celery/test_celery.py | 11 +- tests/integrations/gcp/test_gcp.py | 15 +- tests/integrations/rq/test_rq.py | 16 +- tests/integrations/tornado/test_tornado.py | 3 +- tests/integrations/wsgi/test_wsgi.py | 9 +- 18 files changed, 221 insertions(+), 151 deletions(-) diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index 6f0aeb4510..d78abe14c5 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -20,102 +20,109 @@ Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of wh - Redis integration: In Redis pipeline spans there is no `span["data"]["redis.commands"]` that contains a dict `{"count": 3, "first_ten": ["cmd1", "cmd2", ...]}` but instead `span["data"]["redis.commands.count"]` (containing `3`) and `span["data"]["redis.commands.first_ten"]` (containing `["cmd1", "cmd2", ...]`). - clickhouse-driver integration: The query is now available under the `db.query.text` span attribute (only if `send_default_pii` is `True`). - `sentry_sdk.init` now returns `None` instead of a context manager. -- The `sampling_context` argument of `traces_sampler` now additionally contains all span attributes known at span start. -- If you're using the Celery integration, the `sampling_context` argument of `traces_sampler` doesn't contain the `celery_job` dictionary anymore. Instead, the individual keys are now available as: - - | Dictionary keys | Sampling context key | - | ---------------------- | -------------------- | - | `celery_job["args"]` | `celery.job.args` | - | `celery_job["kwargs"]` | `celery.job.kwargs` | - | `celery_job["task"]` | `celery.job.task` | - - Note that all of these are serialized, i.e., not the original `args` and `kwargs` but rather OpenTelemetry-friendly span attributes. - -- If you're using the AIOHTTP integration, the `sampling_context` argument of `traces_sampler` doesn't contain the `aiohttp_request` object anymore. Instead, some of the individual properties of the request are accessible, if available, as follows: - - | Request property | Sampling context key(s) | - | ---------------- | ------------------------------- | - | `path` | `url.path` | - | `query_string` | `url.query` | - | `method` | `http.request.method` | - | `host` | `server.address`, `server.port` | - | `scheme` | `url.scheme` | - | full URL | `url.full` | - -- If you're using the Tornado integration, the `sampling_context` argument of `traces_sampler` doesn't contain the `tornado_request` object anymore. 
Instead, some of the individual properties of the request are accessible, if available, as follows: - - | Request property | Sampling context key(s) | - | ---------------- | --------------------------------------------------- | - | `path` | `url.path` | - | `query` | `url.query` | - | `protocol` | `url.scheme` | - | `method` | `http.request.method` | - | `host` | `server.address`, `server.port` | - | `version` | `network.protocol.name`, `network.protocol.version` | - | full URL | `url.full` | - -- If you're using the generic WSGI integration, the `sampling_context` argument of `traces_sampler` doesn't contain the `wsgi_environ` object anymore. Instead, the individual properties of the environment are accessible, if available, as follows: - - | Env property | Sampling context key(s) | - | ----------------- | ------------------------------------------------- | - | `PATH_INFO` | `url.path` | - | `QUERY_STRING` | `url.query` | - | `REQUEST_METHOD` | `http.request.method` | - | `SERVER_NAME` | `server.address` | - | `SERVER_PORT` | `server.port` | - | `SERVER_PROTOCOL` | `server.protocol.name`, `server.protocol.version` | - | `wsgi.url_scheme` | `url.scheme` | - | full URL | `url.full` | - -- If you're using the generic ASGI integration, the `sampling_context` argument of `traces_sampler` doesn't contain the `asgi_scope` object anymore. Instead, the individual properties of the scope, if available, are accessible as follows: - - | Scope property | Sampling context key(s) | - | -------------- | ------------------------------- | - | `type` | `network.protocol.name` | - | `scheme` | `url.scheme` | - | `path` | `url.path` | - | `query` | `url.query` | - | `http_version` | `network.protocol.version` | - | `method` | `http.request.method` | - | `server` | `server.address`, `server.port` | - | `client` | `client.address`, `client.port` | - | full URL | `url.full` | - -- If you're using the RQ integration, the `sampling_context` argument of `traces_sampler` doesn't contain the `rq_job` object anymore. Instead, the individual properties of the job and the queue, if available, are accessible as follows: - - | RQ property | Sampling context key(s) | - | --------------- | ---------------------------- | - | `rq_job.args` | `rq.job.args` | - | `rq_job.kwargs` | `rq.job.kwargs` | - | `rq_job.func` | `rq.job.func` | - | `queue.name` | `messaging.destination.name` | - | `rq_job.id` | `messaging.message.id` | - - Note that `rq.job.args`, `rq.job.kwargs`, and `rq.job.func` are serialized and not the actual objects on the job. - -- If you're using the AWS Lambda integration, the `sampling_context` argument of `traces_sampler` doesn't contain the `aws_event` and `aws_context` objects anymore. Instead, the following, if available, is accessible: - - | AWS property | Sampling context key(s) | - | ------------------------------------------- | ----------------------- | - | `aws_event["httpMethod"]` | `http.request.method` | - | `aws_event["queryStringParameters"]` | `url.query` | - | `aws_event["path"]` | `url.path` | - | full URL | `url.full` | - | `aws_event["headers"]["X-Forwarded-Proto"]` | `network.protocol.name` | - | `aws_event["headers"]["Host"]` | `server.address` | - | `aws_context["function_name"]` | `faas.name` | - -- If you're using the GCP integration, the `sampling_context` argument of `traces_sampler` doesn't contain the `gcp_env` and `gcp_event` keys anymore. 
Instead, the following, if available, is accessible: - - | Old sampling context key | New sampling context key | - | --------------------------------- | -------------------------- | - | `gcp_env["function_name"]` | `faas.name` | - | `gcp_env["function_region"]` | `faas.region` | - | `gcp_env["function_project"]` | `gcp.function.project` | - | `gcp_env["function_identity"]` | `gcp.function.identity` | - | `gcp_env["function_entry_point"]` | `gcp.function.entry_point` | - | `gcp_event.method` | `http.request.method` | - | `gcp_event.query_string` | `url.query` | +- The `sampling_context` argument of `traces_sampler` and `profiles_sampler` now additionally contains all span attributes known at span start. +- The integration-specific content of the `sampling_context` argument of `traces_sampler` and `profiles_sampler` now looks different. + - The Celery integration doesn't add the `celery_job` dictionary anymore. Instead, the individual keys are now available as: + + | Dictionary keys | Sampling context key | Example | + | ---------------------- | --------------------------- | ------------------------------ | + | `celery_job["args"]` | `celery.job.args.{index}` | `celery.job.args.0` | + | `celery_job["kwargs"]` | `celery.job.kwargs.{kwarg}` | `celery.job.kwargs.kwarg_name` | + | `celery_job["task"]` | `celery.job.task` | | + + Note that all of these are serialized, i.e., not the original `args` and `kwargs` but rather OpenTelemetry-friendly span attributes. + + - The AIOHTTP integration doesn't add the `aiohttp_request` object anymore. Instead, some of the individual properties of the request are accessible, if available, as follows: + + | Request property | Sampling context key(s) | + | ----------------- | ------------------------------- | + | `path` | `url.path` | + | `query_string` | `url.query` | + | `method` | `http.request.method` | + | `host` | `server.address`, `server.port` | + | `scheme` | `url.scheme` | + | full URL | `url.full` | + | `request.headers` | `http.request.header.{header}` | + + - The Tornado integration doesn't add the `tornado_request` object anymore. Instead, some of the individual properties of the request are accessible, if available, as follows: + + | Request property | Sampling context key(s) | + | ----------------- | --------------------------------------------------- | + | `path` | `url.path` | + | `query` | `url.query` | + | `protocol` | `url.scheme` | + | `method` | `http.request.method` | + | `host` | `server.address`, `server.port` | + | `version` | `network.protocol.name`, `network.protocol.version` | + | full URL | `url.full` | + | `request.headers` | `http.request.header.{header}` | + + - The WSGI integration doesn't add the `wsgi_environ` object anymore. Instead, the individual properties of the environment are accessible, if available, as follows: + + | Env property | Sampling context key(s) | + | ----------------- | ------------------------------------------------- | + | `PATH_INFO` | `url.path` | + | `QUERY_STRING` | `url.query` | + | `REQUEST_METHOD` | `http.request.method` | + | `SERVER_NAME` | `server.address` | + | `SERVER_PORT` | `server.port` | + | `SERVER_PROTOCOL` | `server.protocol.name`, `server.protocol.version` | + | `wsgi.url_scheme` | `url.scheme` | + | full URL | `url.full` | + | `HTTP_*` | `http.request.header.{header}` | + + - The ASGI integration doesn't add the `asgi_scope` object anymore. 
Instead, the individual properties of the scope, if available, are accessible as follows: + + | Scope property | Sampling context key(s) | + | -------------- | ------------------------------- | + | `type` | `network.protocol.name` | + | `scheme` | `url.scheme` | + | `path` | `url.path` | + | `query` | `url.query` | + | `http_version` | `network.protocol.version` | + | `method` | `http.request.method` | + | `server` | `server.address`, `server.port` | + | `client` | `client.address`, `client.port` | + | full URL | `url.full` | + | `headers` | `http.request.header.{header}` | + + -The RQ integration doesn't add the `rq_job` object anymore. Instead, the individual properties of the job and the queue, if available, are accessible as follows: + + | RQ property | Sampling context key | Example | + | --------------- | ---------------------------- | ---------------------- | + | `rq_job.args` | `rq.job.args.{index}` | `rq.job.args.0` | + | `rq_job.kwargs` | `rq.job.kwargs.{kwarg}` | `rq.job.args.my_kwarg` | + | `rq_job.func` | `rq.job.func` | | + | `queue.name` | `messaging.destination.name` | | + | `rq_job.id` | `messaging.message.id` | | + + Note that `rq.job.args`, `rq.job.kwargs`, and `rq.job.func` are serialized and not the actual objects on the job. + + - The AWS Lambda integration doesn't add the `aws_event` and `aws_context` objects anymore. Instead, the following, if available, is accessible: + + | AWS property | Sampling context key(s) | + | ------------------------------------------- | ------------------------------- | + | `aws_event["httpMethod"]` | `http.request.method` | + | `aws_event["queryStringParameters"]` | `url.query` | + | `aws_event["path"]` | `url.path` | + | full URL | `url.full` | + | `aws_event["headers"]["X-Forwarded-Proto"]` | `network.protocol.name` | + | `aws_event["headers"]["Host"]` | `server.address` | + | `aws_context["function_name"]` | `faas.name` | + | `aws_event["headers"]` | `http.request.headers.{header}` | + + - The GCP integration doesn't add the `gcp_env` and `gcp_event` keys anymore. 
Instead, the following, if available, is accessible: + + | Old sampling context key | New sampling context key | + | --------------------------------- | ------------------------------ | + | `gcp_env["function_name"]` | `faas.name` | + | `gcp_env["function_region"]` | `faas.region` | + | `gcp_env["function_project"]` | `gcp.function.project` | + | `gcp_env["function_identity"]` | `gcp.function.identity` | + | `gcp_env["function_entry_point"]` | `gcp.function.entry_point` | + | `gcp_event.method` | `http.request.method` | + | `gcp_event.query_string` | `url.query` | + | `gcp_event.headers` | `http.request.header.{header}` | ### Removed diff --git a/sentry_sdk/integrations/_wsgi_common.py b/sentry_sdk/integrations/_wsgi_common.py index 072a102b7c..8adbe47224 100644 --- a/sentry_sdk/integrations/_wsgi_common.py +++ b/sentry_sdk/integrations/_wsgi_common.py @@ -3,7 +3,7 @@ import sentry_sdk from sentry_sdk.scope import should_send_default_pii -from sentry_sdk.utils import AnnotatedValue, logger +from sentry_sdk.utils import AnnotatedValue, logger, SENSITIVE_DATA_SUBSTITUTE try: from django.http.request import RawPostDataException @@ -221,6 +221,20 @@ def _filter_headers(headers): } +def _request_headers_to_span_attributes(headers): + # type: (dict[str, str]) -> dict[str, str] + attributes = {} + + headers = _filter_headers(headers) + + for header, value in headers.items(): + if isinstance(value, AnnotatedValue): + value = SENSITIVE_DATA_SUBSTITUTE + attributes[f"http.request.header.{header.lower()}"] = value + + return attributes + + def _in_http_status_code_range(code, code_ranges): # type: (object, list[HttpStatusCodeRange]) -> bool for target in code_ranges: diff --git a/sentry_sdk/integrations/aiohttp.py b/sentry_sdk/integrations/aiohttp.py index ccc4593606..59bc70e5d4 100644 --- a/sentry_sdk/integrations/aiohttp.py +++ b/sentry_sdk/integrations/aiohttp.py @@ -13,6 +13,7 @@ from sentry_sdk.sessions import track_session from sentry_sdk.integrations._wsgi_common import ( _filter_headers, + _request_headers_to_span_attributes, request_body_within_bounds, ) from sentry_sdk.tracing import ( @@ -389,11 +390,11 @@ def _prepopulate_attributes(request): except ValueError: attributes["server.address"] = request.host - try: + with capture_internal_exceptions(): url = f"{request.scheme}://{request.host}{request.path}" # noqa: E231 if request.query_string: attributes["url.full"] = f"{url}?{request.query_string}" - except Exception: - pass + + attributes.update(_request_headers_to_span_attributes(dict(request.headers))) return attributes diff --git a/sentry_sdk/integrations/asgi.py b/sentry_sdk/integrations/asgi.py index 80c24b8cb6..4a3fe830eb 100644 --- a/sentry_sdk/integrations/asgi.py +++ b/sentry_sdk/integrations/asgi.py @@ -21,6 +21,7 @@ ) from sentry_sdk.integrations._wsgi_common import ( DEFAULT_HTTP_METHODS_TO_CAPTURE, + _request_headers_to_span_attributes, ) from sentry_sdk.sessions import track_session from sentry_sdk.tracing import ( @@ -32,6 +33,7 @@ ) from sentry_sdk.utils import ( ContextVar, + capture_internal_exceptions, event_from_exception, HAS_REAL_CONTEXTVARS, CONTEXTVARS_ERROR_MESSAGE, @@ -348,11 +350,12 @@ def _prepopulate_attributes(scope): try: host, port = scope[attr] attributes[f"{attr}.address"] = host - attributes[f"{attr}.port"] = port + if port is not None: + attributes[f"{attr}.port"] = port except Exception: pass - try: + with capture_internal_exceptions(): full_url = _get_url(scope) query = _get_query(scope) if query: @@ -360,7 +363,7 @@ def 
_prepopulate_attributes(scope): full_url = f"{full_url}?{query}" attributes["url.full"] = full_url - except Exception: - pass + + attributes.update(_request_headers_to_span_attributes(_get_headers(scope))) return attributes diff --git a/sentry_sdk/integrations/aws_lambda.py b/sentry_sdk/integrations/aws_lambda.py index 177d73a638..cd2b3cc417 100644 --- a/sentry_sdk/integrations/aws_lambda.py +++ b/sentry_sdk/integrations/aws_lambda.py @@ -20,7 +20,10 @@ reraise, ) from sentry_sdk.integrations import Integration -from sentry_sdk.integrations._wsgi_common import _filter_headers +from sentry_sdk.integrations._wsgi_common import ( + _filter_headers, + _request_headers_to_span_attributes, +) from typing import TYPE_CHECKING @@ -162,7 +165,7 @@ def sentry_handler(aws_event, aws_context, *args, **kwargs): name=aws_context.function_name, source=TRANSACTION_SOURCE_COMPONENT, origin=AwsLambdaIntegration.origin, - attributes=_prepopulate_attributes(aws_event, aws_context), + attributes=_prepopulate_attributes(request_data, aws_context), ): try: return handler(aws_event, aws_context, *args, **kwargs) @@ -468,6 +471,7 @@ def _event_from_error_json(error_json): def _prepopulate_attributes(aws_event, aws_context): + # type: (Any, Any) -> dict[str, Any] attributes = { "cloud.provider": "aws", } @@ -486,10 +490,15 @@ def _prepopulate_attributes(aws_event, aws_context): url += f"?{aws_event['queryStringParameters']}" attributes["url.full"] = url - headers = aws_event.get("headers") or {} + headers = {} + if aws_event.get("headers") and isinstance(aws_event["headers"], dict): + headers = aws_event["headers"] + if headers.get("X-Forwarded-Proto"): attributes["network.protocol.name"] = headers["X-Forwarded-Proto"] if headers.get("Host"): attributes["server.address"] = headers["Host"] + attributes.update(_request_headers_to_span_attributes(headers)) + return attributes diff --git a/sentry_sdk/integrations/celery/__init__.py b/sentry_sdk/integrations/celery/__init__.py index 0b66bbf05c..a943871335 100644 --- a/sentry_sdk/integrations/celery/__init__.py +++ b/sentry_sdk/integrations/celery/__init__.py @@ -20,7 +20,6 @@ ensure_integration_enabled, event_from_exception, reraise, - _serialize_span_attribute, ) from typing import TYPE_CHECKING @@ -514,9 +513,17 @@ def sentry_publish(self, *args, **kwargs): def _prepopulate_attributes(task, args, kwargs): + # type: (Any, *Any, **Any) -> dict[str, str] attributes = { "celery.job.task": task.name, - "celery.job.args": _serialize_span_attribute(args), - "celery.job.kwargs": _serialize_span_attribute(kwargs), } + + for i, arg in enumerate(args): + with capture_internal_exceptions(): + attributes[f"celery.job.args.{i}"] = str(arg) + + for kwarg, value in kwargs.items(): + with capture_internal_exceptions(): + attributes[f"celery.job.kwargs.{kwarg}"] = str(value) + return attributes diff --git a/sentry_sdk/integrations/gcp.py b/sentry_sdk/integrations/gcp.py index 2f17464f70..dd23ad1e0a 100644 --- a/sentry_sdk/integrations/gcp.py +++ b/sentry_sdk/integrations/gcp.py @@ -7,7 +7,10 @@ import sentry_sdk from sentry_sdk.consts import OP from sentry_sdk.integrations import Integration -from sentry_sdk.integrations._wsgi_common import _filter_headers +from sentry_sdk.integrations._wsgi_common import ( + _filter_headers, + _request_headers_to_span_attributes, +) from sentry_sdk.scope import should_send_default_pii from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT from sentry_sdk.utils import ( @@ -236,6 +239,7 @@ def _get_google_cloud_logs_url(final_time): def 
_prepopulate_attributes(gcp_event): + # type: (Any) -> dict[str, Any] attributes = { "cloud.provider": "gcp", } @@ -248,4 +252,8 @@ def _prepopulate_attributes(gcp_event): if getattr(gcp_event, key, None): attributes[attr] = getattr(gcp_event, key) + if hasattr(gcp_event, "headers"): + headers = gcp_event.headers + attributes.update(_request_headers_to_span_attributes(headers)) + return attributes diff --git a/sentry_sdk/integrations/rq.py b/sentry_sdk/integrations/rq.py index b097b253ce..fb99fc1b89 100644 --- a/sentry_sdk/integrations/rq.py +++ b/sentry_sdk/integrations/rq.py @@ -6,7 +6,6 @@ from sentry_sdk.integrations.logging import ignore_logger from sentry_sdk.tracing import TRANSACTION_SOURCE_TASK from sentry_sdk.utils import ( - _serialize_span_attribute, capture_internal_exceptions, ensure_integration_enabled, event_from_exception, @@ -183,6 +182,7 @@ def _prepopulate_attributes(job, queue): # type: (Job, Queue) -> dict[str, Any] attributes = { "messaging.system": "rq", + "rq.job.id": job.id, } for prop, attr in JOB_PROPERTY_TO_ATTRIBUTE.items(): @@ -193,14 +193,20 @@ def _prepopulate_attributes(job, queue): if getattr(queue, prop, None) is not None: attributes[attr] = getattr(queue, prop) - for key in ("args", "kwargs"): - if getattr(job, key, None): - attributes[f"rq.job.{key}"] = _serialize_span_attribute(getattr(job, key)) + if getattr(job, "args", None): + for i, arg in enumerate(job.args): + with capture_internal_exceptions(): + attributes[f"rq.job.args.{i}"] = str(arg) + + if getattr(job, "kwargs", None): + for kwarg, value in job.kwargs.items(): + with capture_internal_exceptions(): + attributes[f"rq.job.kwargs.{kwarg}"] = str(value) func = job.func if callable(func): func = func.__name__ - attributes["rq.job.func"] = _serialize_span_attribute(func) + attributes["rq.job.func"] = str(func) return attributes diff --git a/sentry_sdk/integrations/tornado.py b/sentry_sdk/integrations/tornado.py index 591f59ec03..bb40fbf625 100644 --- a/sentry_sdk/integrations/tornado.py +++ b/sentry_sdk/integrations/tornado.py @@ -22,6 +22,7 @@ RequestExtractor, _filter_headers, _is_json_content_type, + _request_headers_to_span_attributes, ) from sentry_sdk.integrations.logging import ignore_logger @@ -246,7 +247,7 @@ def _prepopulate_attributes(request): except ValueError: attributes["network.protocol.name"] = request.version - if getattr(request, "host", None) is not None: + if getattr(request, "host", None): try: address, port = request.host.split(":") attributes["server.address"] = address @@ -254,9 +255,9 @@ def _prepopulate_attributes(request): except ValueError: attributes["server.address"] = request.host - try: + with capture_internal_exceptions(): attributes["url.full"] = request.full_url() - except Exception: - pass + + attributes.update(_request_headers_to_span_attributes(request.headers)) return attributes diff --git a/sentry_sdk/integrations/wsgi.py b/sentry_sdk/integrations/wsgi.py index 726a310482..7f7360a341 100644 --- a/sentry_sdk/integrations/wsgi.py +++ b/sentry_sdk/integrations/wsgi.py @@ -9,9 +9,9 @@ from sentry_sdk.integrations._wsgi_common import ( DEFAULT_HTTP_METHODS_TO_CAPTURE, _filter_headers, + _request_headers_to_span_attributes, ) from sentry_sdk.sessions import track_session -from sentry_sdk.scope import use_isolation_scope from sentry_sdk.tracing import Transaction, TRANSACTION_SOURCE_ROUTE from sentry_sdk.utils import ( ContextVar, @@ -324,6 +324,7 @@ def event_processor(event, hint): def _prepopulate_attributes(wsgi_environ, use_x_forwarded_for=False): + # type: 
(dict[str, str], bool) -> dict[str, str] """Extract span attributes from the WSGI environment.""" attributes = {} @@ -339,11 +340,13 @@ def _prepopulate_attributes(wsgi_environ, use_x_forwarded_for=False): except Exception: attributes["network.protocol.name"] = wsgi_environ["SERVER_PROTOCOL"] - try: + with capture_internal_exceptions(): url = get_request_url(wsgi_environ, use_x_forwarded_for) query = wsgi_environ.get("QUERY_STRING") attributes["url.full"] = f"{url}?{query}" - except Exception: - pass + + attributes.update( + _request_headers_to_span_attributes(dict(_get_headers(wsgi_environ))) + ) return attributes diff --git a/tests/integrations/aiohttp/test_aiohttp.py b/tests/integrations/aiohttp/test_aiohttp.py index 8327832acc..4b491b152e 100644 --- a/tests/integrations/aiohttp/test_aiohttp.py +++ b/tests/integrations/aiohttp/test_aiohttp.py @@ -309,7 +309,9 @@ async def kangaroo_handler(request): app.router.add_get("/tricks/kangaroo", kangaroo_handler) client = await aiohttp_client(app) - await client.get("/tricks/kangaroo?jump=high") + await client.get( + "/tricks/kangaroo?jump=high", headers={"Custom-Header": "Custom Value"} + ) assert traces_sampler.call_count == 1 sampling_context = traces_sampler.call_args_list[0][0][0] @@ -324,6 +326,7 @@ async def kangaroo_handler(request): assert sampling_context["http.request.method"] == "GET" assert sampling_context["server.address"] == "127.0.0.1" assert sampling_context["server.port"].isnumeric() + assert sampling_context["http.request.header.custom-header"] == "Custom Value" @pytest.mark.asyncio diff --git a/tests/integrations/asgi/test_asgi.py b/tests/integrations/asgi/test_asgi.py index adfd798c72..153117f8ee 100644 --- a/tests/integrations/asgi/test_asgi.py +++ b/tests/integrations/asgi/test_asgi.py @@ -733,6 +733,7 @@ def dummy_traces_sampler(sampling_context): assert sampling_context["http.request.method"] == "GET" assert sampling_context["network.protocol.version"] == "1.1" assert sampling_context["network.protocol.name"] == "http" + assert sampling_context["http.request.header.custom-header"] == "Custom Value" sentry_init( traces_sampler=dummy_traces_sampler, @@ -742,4 +743,4 @@ def dummy_traces_sampler(sampling_context): app = SentryAsgiMiddleware(asgi3_app) async with TestClient(app) as client: - await client.get("/test?hello=there") + await client.get("/test?hello=there", headers={"Custom-Header": "Custom Value"}) diff --git a/tests/integrations/aws_lambda/test_aws.py b/tests/integrations/aws_lambda/test_aws.py index c1235ae0a0..e58fab292d 100644 --- a/tests/integrations/aws_lambda/test_aws.py +++ b/tests/integrations/aws_lambda/test_aws.py @@ -625,6 +625,7 @@ def test_handler(event, context): "url.full": "http://x.io/sit/stay/rollover?repeat=twice", "network.protocol.name": "http", "server.address": "x.io", + "http.request.header.custom-header": "Custom Value", } ) ) @@ -643,7 +644,7 @@ def test_handler(event, context): ) """ ), - b'{"httpMethod": "GET", "path": "/sit/stay/rollover", "query_string": {"repeat": "again"}, "headers": {"Host": "x.io", "X-Forwarded-Proto": "http"}}', + b'{"httpMethod": "GET", "path": "/sit/stay/rollover", "query_string": {"repeat": "again"}, "headers": {"Host": "x.io", "X-Forwarded-Proto": "http", "Custom-Header": "Custom Value"}}', ) assert response["Payload"]["AssertionError raised"] is False diff --git a/tests/integrations/celery/test_celery.py b/tests/integrations/celery/test_celery.py index 119e0d0e39..1011429098 100644 --- a/tests/integrations/celery/test_celery.py +++ 
b/tests/integrations/celery/test_celery.py @@ -13,7 +13,6 @@ _wrap_task_run, ) from sentry_sdk.integrations.celery.beat import _get_headers -from sentry_sdk.utils import _serialize_span_attribute from tests.conftest import ApproxDict @@ -448,12 +447,10 @@ def walk_dogs(x, y): sampling_context = traces_sampler.call_args_list[1][0][0] assert sampling_context["celery.job.task"] == "dog_walk" - assert sampling_context["celery.job.args"] == _serialize_span_attribute( - args_kwargs["args"] - ) - assert sampling_context["celery.job.kwargs"] == _serialize_span_attribute( - args_kwargs["kwargs"] - ) + for i, arg in enumerate(args_kwargs["args"]): + assert sampling_context[f"celery.job.args.{i}"] == str(arg) + for kwarg, value in args_kwargs["kwargs"].items(): + assert sampling_context[f"celery.job.kwargs.{kwarg}"] == str(value) def test_abstract_task(capture_events, celery, celery_invocation): diff --git a/tests/integrations/gcp/test_gcp.py b/tests/integrations/gcp/test_gcp.py index f33c1b35d7..3ea97cf0e6 100644 --- a/tests/integrations/gcp/test_gcp.py +++ b/tests/integrations/gcp/test_gcp.py @@ -293,11 +293,11 @@ def test_traces_sampler_gets_correct_values_in_sampling_context( dedent( """ functionhandler = None - event = { - "type": "chase", - "chasers": ["Maisey", "Charlie"], - "num_squirrels": 2, - } + + from collections import namedtuple + GCPEvent = namedtuple("GCPEvent", ["headers"]) + event = GCPEvent(headers={"Custom-Header": "Custom Value"}) + def cloud_function(functionhandler, event): # this runs after the transaction has started, which means we # can make assertions about traces_sampler @@ -310,14 +310,15 @@ def cloud_function(functionhandler, event): "gcp.function.entry_point": "cloud_function", "gcp.function.project": "SquirrelChasing", "cloud.provider": "gcp", + "http.request.header.custom-header": "Custom Value", }) ) except AssertionError: # catch the error and return it because the error itself will # get swallowed by the SDK as an "internal exception" - return {"AssertionError raised": True,} + return {"AssertionError raised": True} - return {"AssertionError raised": False,} + return {"AssertionError raised": False} """ ) + FUNCTIONS_PRELUDE diff --git a/tests/integrations/rq/test_rq.py b/tests/integrations/rq/test_rq.py index fbe5a521d3..c7eeb377e6 100644 --- a/tests/integrations/rq/test_rq.py +++ b/tests/integrations/rq/test_rq.py @@ -227,13 +227,23 @@ def test_traces_sampler_gets_correct_values_in_sampling_context(sentry_init): queue = rq.Queue(connection=FakeStrictRedis()) worker = rq.SimpleWorker([queue], connection=queue.connection) - queue.enqueue(do_trick, "Bodhi", trick="roll over") + queue.enqueue( + do_trick, + "Bodhi", + {"age": 5}, + trick="roll over", + times=2, + followup=["fetch", "give paw"], + ) worker.work(burst=True) sampling_context = traces_sampler.call_args_list[0][0][0] assert sampling_context["messaging.system"] == "rq" - assert sampling_context["rq.job.args"] == ["Bodhi"] - assert sampling_context["rq.job.kwargs"] == '{"trick": "roll over"}' + assert sampling_context["rq.job.args.0"] == "Bodhi" + assert sampling_context["rq.job.args.1"] == "{'age': 5}" + assert sampling_context["rq.job.kwargs.trick"] == "roll over" + assert sampling_context["rq.job.kwargs.times"] == "2" + assert sampling_context["rq.job.kwargs.followup"] == "['fetch', 'give paw']" assert sampling_context["rq.job.func"] == "do_trick" assert sampling_context["messaging.message.id"] assert sampling_context["messaging.destination.name"] == "default" diff --git 
a/tests/integrations/tornado/test_tornado.py b/tests/integrations/tornado/test_tornado.py index 7ad974c535..837da07434 100644 --- a/tests/integrations/tornado/test_tornado.py +++ b/tests/integrations/tornado/test_tornado.py @@ -467,6 +467,7 @@ def traces_sampler(sampling_context): assert sampling_context["server.port"].isnumeric() assert sampling_context["network.protocol.name"] == "HTTP" assert sampling_context["network.protocol.version"] == "1.1" + assert sampling_context["http.request.header.custom-header"] == "Custom Value" return True @@ -476,4 +477,4 @@ def traces_sampler(sampling_context): ) client = tornado_testcase(Application([(r"/hi", HelloHandler)])) - client.fetch("/hi?foo=bar") + client.fetch("/hi?foo=bar", headers={"Custom-Header": "Custom Value"}) diff --git a/tests/integrations/wsgi/test_wsgi.py b/tests/integrations/wsgi/test_wsgi.py index 5aad355277..487ccbfd69 100644 --- a/tests/integrations/wsgi/test_wsgi.py +++ b/tests/integrations/wsgi/test_wsgi.py @@ -1,5 +1,4 @@ from collections import Counter -from datetime import datetime from unittest import mock import pytest @@ -327,10 +326,7 @@ def dogpark(environ, start_response): assert error_event["contexts"]["trace"]["trace_id"] == trace_id -def test_traces_sampler_gets_correct_values_in_sampling_context( - sentry_init, - DictionaryContaining, # noqa:N803 -): +def test_traces_sampler_gets_correct_values_in_sampling_context(sentry_init): def app(environ, start_response): start_response("200 OK", []) return ["Go get the ball! Good dog!"] @@ -343,13 +339,14 @@ def traces_sampler(sampling_context): assert ( sampling_context["url.full"] == "http://localhost/dogs/are/great/?cats=too" ) + assert sampling_context["http.request.header.custom-header"] == "Custom Value" return True sentry_init(send_default_pii=True, traces_sampler=traces_sampler) app = SentryWsgiMiddleware(app) client = Client(app) - client.get("/dogs/are/great/?cats=too") + client.get("/dogs/are/great/?cats=too", headers={"Custom-Header": "Custom Value"}) def test_session_mode_defaults_to_request_mode_in_wsgi_handler( From a545ec090bfb2f95ca2508f65a01163f77c40130 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Fri, 6 Dec 2024 15:53:46 +0100 Subject: [PATCH 149/244] Properly sort breadcrumbs (#3864) --- sentry_sdk/scope.py | 3 ++- sentry_sdk/utils.py | 25 ++++++++++++++++++++++ tests/test_basics.py | 6 +++--- tests/test_utils.py | 50 ++++++++++++++++++++++++++++++++++++++++++++ 4 files changed, 80 insertions(+), 4 deletions(-) diff --git a/sentry_sdk/scope.py b/sentry_sdk/scope.py index a643cfa9c5..28fe89febe 100644 --- a/sentry_sdk/scope.py +++ b/sentry_sdk/scope.py @@ -33,6 +33,7 @@ capture_internal_exception, capture_internal_exceptions, ContextVar, + datetime_from_isoformat, disable_capture_event, event_from_exception, exc_info_from_error, @@ -1264,7 +1265,7 @@ def _apply_breadcrumbs_to_event(self, event, hint, options): try: for crumb in event["breadcrumbs"]["values"]: if isinstance(crumb["timestamp"], str): - crumb["timestamp"] = datetime.fromisoformat(crumb["timestamp"]) + crumb["timestamp"] = datetime_from_isoformat(crumb["timestamp"]) event["breadcrumbs"]["values"].sort(key=lambda crumb: crumb["timestamp"]) except Exception as err: diff --git a/sentry_sdk/utils.py b/sentry_sdk/utils.py index 8b45f555ae..16dfd9c4fa 100644 --- a/sentry_sdk/utils.py +++ b/sentry_sdk/utils.py @@ -1929,3 +1929,28 @@ def _serialize_span_attribute(value): return str(value) except Exception: return None + + +ISO_TZ_SEPARATORS = frozenset(("+", "-")) + + +def 
datetime_from_isoformat(value): + # type: (str) -> datetime + try: + result = datetime.fromisoformat(value) + except (AttributeError, ValueError): + # py 3.6 + timestamp_format = ( + "%Y-%m-%dT%H:%M:%S.%f" if "." in value else "%Y-%m-%dT%H:%M:%S" + ) + if value.endswith("Z"): + value = value[:-1] + "+0000" + + if value[-6] in ISO_TZ_SEPARATORS: + timestamp_format += "%z" + value = value[:-3] + value[-2:] + elif value[-5] in ISO_TZ_SEPARATORS: + timestamp_format += "%z" + + result = datetime.strptime(value, timestamp_format) + return result.astimezone(timezone.utc) diff --git a/tests/test_basics.py b/tests/test_basics.py index 749d31d7d3..3c05f9848a 100644 --- a/tests/test_basics.py +++ b/tests/test_basics.py @@ -32,7 +32,7 @@ from sentry_sdk.integrations.logging import LoggingIntegration from sentry_sdk.integrations.stdlib import StdlibIntegration from sentry_sdk.scope import add_global_event_processor -from sentry_sdk.utils import get_sdk_name, reraise +from sentry_sdk.utils import datetime_from_isoformat, get_sdk_name, reraise from sentry_sdk.tracing_utils import has_tracing_enabled @@ -348,7 +348,7 @@ def test_breadcrumb_ordering(sentry_init, capture_events): assert len(event["breadcrumbs"]["values"]) == len(timestamps) timestamps_from_event = [ - datetime.fromisoformat(x["timestamp"]) for x in event["breadcrumbs"]["values"] + datetime_from_isoformat(x["timestamp"]) for x in event["breadcrumbs"]["values"] ] assert timestamps_from_event == sorted(timestamps) @@ -389,7 +389,7 @@ def test_breadcrumb_ordering_different_types(sentry_init, capture_events): assert len(event["breadcrumbs"]["values"]) == len(timestamps) timestamps_from_event = [ - datetime.fromisoformat(x["timestamp"]) for x in event["breadcrumbs"]["values"] + datetime_from_isoformat(x["timestamp"]) for x in event["breadcrumbs"]["values"] ] assert timestamps_from_event == sorted(timestamps) diff --git a/tests/test_utils.py b/tests/test_utils.py index 5011662f05..2eab252573 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -12,6 +12,7 @@ from sentry_sdk.utils import ( Components, Dsn, + datetime_from_isoformat, env_to_bool, format_timestamp, get_current_thread_meta, @@ -933,3 +934,52 @@ def __str__(self): ) def test_serialize_span_attribute(value, result): assert _serialize_span_attribute(value) == result + + +@pytest.mark.parametrize( + ("input_str", "expected_output"), + ( + ( + "2021-01-01T00:00:00.000000Z", + datetime(2021, 1, 1, tzinfo=timezone.utc), + ), # UTC time + ( + "2021-01-01T00:00:00.000000", + datetime(2021, 1, 1, tzinfo=datetime.now().astimezone().tzinfo), + ), # No TZ -- assume UTC + ( + "2021-01-01T00:00:00Z", + datetime(2021, 1, 1, tzinfo=timezone.utc), + ), # UTC - No milliseconds + ( + "2021-01-01T00:00:00.000000+00:00", + datetime(2021, 1, 1, tzinfo=timezone.utc), + ), + ( + "2021-01-01T00:00:00.000000-00:00", + datetime(2021, 1, 1, tzinfo=timezone.utc), + ), + ( + "2021-01-01T00:00:00.000000+0000", + datetime(2021, 1, 1, tzinfo=timezone.utc), + ), + ( + "2021-01-01T00:00:00.000000-0000", + datetime(2021, 1, 1, tzinfo=timezone.utc), + ), + ( + "2020-12-31T00:00:00.000000+02:00", + datetime(2020, 12, 31, tzinfo=timezone(timedelta(hours=2))), + ), # UTC+2 time + ( + "2020-12-31T00:00:00.000000-0200", + datetime(2020, 12, 31, tzinfo=timezone(timedelta(hours=-2))), + ), # UTC-2 time + ( + "2020-12-31T00:00:00-0200", + datetime(2020, 12, 31, tzinfo=timezone(timedelta(hours=-2))), + ), # UTC-2 time - no milliseconds + ), +) +def test_datetime_from_isoformat(input_str, expected_output): + assert 
datetime_from_isoformat(input_str) == expected_output, input_str \ No newline at end of file From 9d20ffdeb024cebe74f59d230ff4ec7d4101e1f7 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Mon, 16 Dec 2024 17:51:11 +0530 Subject: [PATCH 150/244] Fix grpc aio method handling (#3873) --- sentry_sdk/integrations/grpc/aio/client.py | 8 ++- sentry_sdk/integrations/rust_tracing.py | 2 +- tests/integrations/grpc/test_grpc.py | 67 +++++++++++----------- tests/integrations/grpc/test_grpc_aio.py | 48 ++++++++-------- tests/test_utils.py | 2 +- 5 files changed, 63 insertions(+), 64 deletions(-) diff --git a/sentry_sdk/integrations/grpc/aio/client.py b/sentry_sdk/integrations/grpc/aio/client.py index 1a7086c55d..2fd9f70bed 100644 --- a/sentry_sdk/integrations/grpc/aio/client.py +++ b/sentry_sdk/integrations/grpc/aio/client.py @@ -44,10 +44,12 @@ async def intercept_unary_unary( request: Message, ) -> Union[UnaryUnaryCall, Message]: method = client_call_details.method + if isinstance(method, bytes): + method = method.decode() with sentry_sdk.start_span( op=OP.GRPC_CLIENT, - name="unary unary call to %s" % method.decode(), + name="unary unary call to %s" % method, origin=SPAN_ORIGIN, only_if_parent=True, ) as span: @@ -75,10 +77,12 @@ async def intercept_unary_stream( request: Message, ) -> Union[AsyncIterable[Any], UnaryStreamCall]: method = client_call_details.method + if isinstance(method, bytes): + method = method.decode() with sentry_sdk.start_span( op=OP.GRPC_CLIENT, - name="unary stream call to %s" % method.decode(), + name="unary stream call to %s" % method, origin=SPAN_ORIGIN, only_if_parent=True, ) as span: diff --git a/sentry_sdk/integrations/rust_tracing.py b/sentry_sdk/integrations/rust_tracing.py index d394ba5712..68b807064a 100644 --- a/sentry_sdk/integrations/rust_tracing.py +++ b/sentry_sdk/integrations/rust_tracing.py @@ -32,7 +32,7 @@ import json from enum import Enum, auto -from typing import Any, Callable, Dict, Tuple, Optional +from typing import Any, Callable, Dict, Optional import sentry_sdk from sentry_sdk.integrations import Integration diff --git a/tests/integrations/grpc/test_grpc.py b/tests/integrations/grpc/test_grpc.py index a8872ef0b5..51eaef7339 100644 --- a/tests/integrations/grpc/test_grpc.py +++ b/tests/integrations/grpc/test_grpc.py @@ -7,7 +7,7 @@ from typing import List, Optional from unittest.mock import Mock -from sentry_sdk import start_span, start_transaction +from sentry_sdk import start_span from sentry_sdk.consts import OP from sentry_sdk.integrations.grpc import GRPCIntegration from tests.conftest import ApproxDict @@ -41,7 +41,7 @@ def _tear_down(server: grpc.Server): @pytest.mark.forked -def test_grpc_server_starts_transaction(sentry_init, capture_events_forksafe): +def test_grpc_server_starts_root_span(sentry_init, capture_events_forksafe): sentry_init(traces_sample_rate=1.0, integrations=[GRPCIntegration()]) events = capture_events_forksafe() @@ -99,7 +99,7 @@ def test_grpc_server_other_interceptors(sentry_init, capture_events_forksafe): @pytest.mark.forked -def test_grpc_server_continues_transaction(sentry_init, capture_events_forksafe): +def test_grpc_server_continues_trace(sentry_init, capture_events_forksafe): sentry_init(traces_sample_rate=1.0, integrations=[GRPCIntegration()]) events = capture_events_forksafe() @@ -108,20 +108,20 @@ def test_grpc_server_continues_transaction(sentry_init, capture_events_forksafe) with grpc.insecure_channel("localhost:{}".format(PORT)) as channel: stub = gRPCTestServiceStub(channel) - with start_transaction() as 
transaction: + with start_span() as root_span: metadata = ( ( "baggage", "sentry-trace_id={trace_id},sentry-environment=test," "sentry-transaction=test-transaction,sentry-sample_rate=1.0".format( - trace_id=transaction.trace_id + trace_id=root_span.trace_id ), ), ( "sentry-trace", "{trace_id}-{parent_span_id}-{sampled}".format( - trace_id=transaction.trace_id, - parent_span_id=transaction.span_id, + trace_id=root_span.trace_id, + parent_span_id=root_span.span_id, sampled=1, ), ), @@ -139,7 +139,7 @@ def test_grpc_server_continues_transaction(sentry_init, capture_events_forksafe) "source": "custom", } assert event["contexts"]["trace"]["op"] == OP.GRPC_SERVER - assert event["contexts"]["trace"]["trace_id"] == transaction.trace_id + assert event["contexts"]["trace"]["trace_id"] == root_span.trace_id assert span["op"] == "test" @@ -153,17 +153,17 @@ def test_grpc_client_starts_span(sentry_init, capture_events_forksafe): with grpc.insecure_channel("localhost:{}".format(PORT)) as channel: stub = gRPCTestServiceStub(channel) - with start_transaction(): + with start_span(): stub.TestServe(gRPCTestMessage(text="test")) _tear_down(server=server) events.write_file.close() events.read_event() - local_transaction = events.read_event() - span = local_transaction["spans"][0] + local_root_span = events.read_event() + span = local_root_span["spans"][0] - assert len(local_transaction["spans"]) == 1 + assert len(local_root_span["spans"]) == 1 assert span["op"] == OP.GRPC_CLIENT assert ( span["description"] @@ -188,16 +188,16 @@ def test_grpc_client_unary_stream_starts_span(sentry_init, capture_events_forksa with grpc.insecure_channel("localhost:{}".format(PORT)) as channel: stub = gRPCTestServiceStub(channel) - with start_transaction(): + with start_span(): [el for el in stub.TestUnaryStream(gRPCTestMessage(text="test"))] _tear_down(server=server) events.write_file.close() - local_transaction = events.read_event() - span = local_transaction["spans"][0] + local_root_span = events.read_event() + span = local_root_span["spans"][0] - assert len(local_transaction["spans"]) == 1 + assert len(local_root_span["spans"]) == 1 assert span["op"] == OP.GRPC_CLIENT assert ( span["description"] @@ -233,7 +233,7 @@ def test_grpc_client_other_interceptor(sentry_init, capture_events_forksafe): channel = grpc.intercept_channel(channel, MockClientInterceptor()) stub = gRPCTestServiceStub(channel) - with start_transaction(): + with start_span(): stub.TestServe(gRPCTestMessage(text="test")) _tear_down(server=server) @@ -242,10 +242,10 @@ def test_grpc_client_other_interceptor(sentry_init, capture_events_forksafe): events.write_file.close() events.read_event() - local_transaction = events.read_event() - span = local_transaction["spans"][0] + local_root_span = events.read_event() + span = local_root_span["spans"][0] - assert len(local_transaction["spans"]) == 1 + assert len(local_root_span["spans"]) == 1 assert span["op"] == OP.GRPC_CLIENT assert ( span["description"] @@ -272,18 +272,18 @@ def test_grpc_client_and_servers_interceptors_integration( with grpc.insecure_channel("localhost:{}".format(PORT)) as channel: stub = gRPCTestServiceStub(channel) - with start_transaction(): + with start_span(): stub.TestServe(gRPCTestMessage(text="test")) _tear_down(server=server) events.write_file.close() - server_transaction = events.read_event() - local_transaction = events.read_event() + server_root_span = events.read_event() + local_root_span = events.read_event() assert ( - server_transaction["contexts"]["trace"]["trace_id"] - == 
local_transaction["contexts"]["trace"]["trace_id"] + server_root_span["contexts"]["trace"]["trace_id"] + == local_root_span["contexts"]["trace"]["trace_id"] ) @@ -328,26 +328,23 @@ def test_span_origin(sentry_init, capture_events_forksafe): with grpc.insecure_channel("localhost:{}".format(PORT)) as channel: stub = gRPCTestServiceStub(channel) - with start_transaction(name="custom_transaction"): + with start_span(name="custom_root"): stub.TestServe(gRPCTestMessage(text="test")) _tear_down(server=server) events.write_file.close() - transaction_from_integration = events.read_event() - custom_transaction = events.read_event() + root_span_from_integration = events.read_event() + custom_root_span = events.read_event() + assert root_span_from_integration["contexts"]["trace"]["origin"] == "auto.grpc.grpc" assert ( - transaction_from_integration["contexts"]["trace"]["origin"] == "auto.grpc.grpc" - ) - assert ( - transaction_from_integration["spans"][0]["origin"] - == "auto.grpc.grpc.TestService" + root_span_from_integration["spans"][0]["origin"] == "auto.grpc.grpc.TestService" ) # manually created in TestService, not the instrumentation - assert custom_transaction["contexts"]["trace"]["origin"] == "manual" - assert custom_transaction["spans"][0]["origin"] == "auto.grpc.grpc" + assert custom_root_span["contexts"]["trace"]["origin"] == "manual" + assert custom_root_span["spans"][0]["origin"] == "auto.grpc.grpc" class TestService(gRPCTestServiceServicer): diff --git a/tests/integrations/grpc/test_grpc_aio.py b/tests/integrations/grpc/test_grpc_aio.py index 9ce9aef6a5..0d30c59681 100644 --- a/tests/integrations/grpc/test_grpc_aio.py +++ b/tests/integrations/grpc/test_grpc_aio.py @@ -6,7 +6,7 @@ import pytest_asyncio import sentry_sdk -from sentry_sdk import start_span, start_transaction +from sentry_sdk import start_span from sentry_sdk.consts import OP from sentry_sdk.integrations.grpc import GRPCIntegration from tests.conftest import ApproxDict @@ -60,7 +60,7 @@ async def test_noop_for_unimplemented_method(sentry_init, capture_events): @pytest.mark.asyncio -async def test_grpc_server_starts_transaction(grpc_server, capture_events): +async def test_grpc_server_starts_root_span(grpc_server, capture_events): events = capture_events() async with grpc.aio.insecure_channel("localhost:{}".format(AIO_PORT)) as channel: @@ -79,26 +79,26 @@ async def test_grpc_server_starts_transaction(grpc_server, capture_events): @pytest.mark.asyncio -async def test_grpc_server_continues_transaction(grpc_server, capture_events): +async def test_grpc_server_continues_trace(grpc_server, capture_events): events = capture_events() async with grpc.aio.insecure_channel("localhost:{}".format(AIO_PORT)) as channel: stub = gRPCTestServiceStub(channel) - with sentry_sdk.start_transaction() as transaction: + with sentry_sdk.start_span() as root_span: metadata = ( ( "baggage", "sentry-trace_id={trace_id},sentry-environment=test," "sentry-transaction=test-transaction,sentry-sample_rate=1.0".format( - trace_id=transaction.trace_id + trace_id=root_span.trace_id ), ), ( "sentry-trace", "{trace_id}-{parent_span_id}-{sampled}".format( - trace_id=transaction.trace_id, - parent_span_id=transaction.span_id, + trace_id=root_span.trace_id, + parent_span_id=root_span.span_id, sampled=1, ), ), @@ -114,7 +114,7 @@ async def test_grpc_server_continues_transaction(grpc_server, capture_events): "source": "custom", } assert event["contexts"]["trace"]["op"] == OP.GRPC_SERVER - assert event["contexts"]["trace"]["trace_id"] == transaction.trace_id + assert 
event["contexts"]["trace"]["trace_id"] == root_span.trace_id assert span["op"] == "test" @@ -159,15 +159,15 @@ async def test_grpc_client_starts_span(grpc_server, capture_events_forksafe): async with grpc.aio.insecure_channel("localhost:{}".format(AIO_PORT)) as channel: stub = gRPCTestServiceStub(channel) - with start_transaction(): + with start_span(): await stub.TestServe(gRPCTestMessage(text="test")) events.write_file.close() events.read_event() - local_transaction = events.read_event() - span = local_transaction["spans"][0] + local_root_span = events.read_event() + span = local_root_span["spans"][0] - assert len(local_transaction["spans"]) == 1 + assert len(local_root_span["spans"]) == 1 assert span["op"] == OP.GRPC_CLIENT assert ( span["description"] @@ -190,15 +190,15 @@ async def test_grpc_client_unary_stream_starts_span( async with grpc.aio.insecure_channel("localhost:{}".format(AIO_PORT)) as channel: stub = gRPCTestServiceStub(channel) - with start_transaction(): + with start_span(): response = stub.TestUnaryStream(gRPCTestMessage(text="test")) [_ async for _ in response] events.write_file.close() - local_transaction = events.read_event() - span = local_transaction["spans"][0] + local_root_span = events.read_event() + span = local_root_span["spans"][0] - assert len(local_transaction["spans"]) == 1 + assert len(local_root_span["spans"]) == 1 assert span["op"] == OP.GRPC_CLIENT assert ( span["description"] @@ -243,24 +243,22 @@ async def test_span_origin(grpc_server, capture_events_forksafe): async with grpc.aio.insecure_channel("localhost:{}".format(AIO_PORT)) as channel: stub = gRPCTestServiceStub(channel) - with start_transaction(name="custom_transaction"): + with start_span(name="custom_root"): await stub.TestServe(gRPCTestMessage(text="test")) events.write_file.close() - transaction_from_integration = events.read_event() - custom_transaction = events.read_event() + root_span_from_integration = events.read_event() + custom_root_span = events.read_event() + assert root_span_from_integration["contexts"]["trace"]["origin"] == "auto.grpc.grpc" assert ( - transaction_from_integration["contexts"]["trace"]["origin"] == "auto.grpc.grpc" - ) - assert ( - transaction_from_integration["spans"][0]["origin"] + root_span_from_integration["spans"][0]["origin"] == "auto.grpc.grpc.TestService.aio" ) # manually created in TestService, not the instrumentation - assert custom_transaction["contexts"]["trace"]["origin"] == "manual" - assert custom_transaction["spans"][0]["origin"] == "auto.grpc.grpc" + assert custom_root_span["contexts"]["trace"]["origin"] == "manual" + assert custom_root_span["spans"][0]["origin"] == "auto.grpc.grpc" class TestService(gRPCTestServiceServicer): diff --git a/tests/test_utils.py b/tests/test_utils.py index 2eab252573..8613079ebd 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -982,4 +982,4 @@ def test_serialize_span_attribute(value, result): ), ) def test_datetime_from_isoformat(input_str, expected_output): - assert datetime_from_isoformat(input_str) == expected_output, input_str \ No newline at end of file + assert datetime_from_isoformat(input_str) == expected_output, input_str From 1984a5ced9cc41d332a2dfca425fdf7354d3d9e8 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Tue, 17 Dec 2024 13:48:23 +0530 Subject: [PATCH 151/244] Fix graphene integration --- sentry_sdk/integrations/graphene.py | 21 ++++++--------------- 1 file changed, 6 insertions(+), 15 deletions(-) diff --git a/sentry_sdk/integrations/graphene.py b/sentry_sdk/integrations/graphene.py 
index 828bb0ade5..42300e978f 100644 --- a/sentry_sdk/integrations/graphene.py +++ b/sentry_sdk/integrations/graphene.py @@ -140,19 +140,10 @@ def graphql_span(schema, source, kwargs): }, ) - scope = sentry_sdk.get_current_scope() - if scope.span: - _graphql_span = scope.span.start_child(op=op, name=operation_name) - else: - _graphql_span = sentry_sdk.start_span( - op=op, name=operation_name, only_if_parent=True - ) - - _graphql_span.set_data("graphql.document", source) - _graphql_span.set_data("graphql.operation.name", operation_name) - _graphql_span.set_data("graphql.operation.type", operation_type) - - try: + with sentry_sdk.start_span( + op=op, name=operation_name, only_if_parent=True + ) as graphql_span: + graphql_span.set_data("graphql.document", source) + graphql_span.set_data("graphql.operation.name", operation_name) + graphql_span.set_data("graphql.operation.type", operation_type) yield - finally: - _graphql_span.finish() From 92f53910d54eb6f78632b689980a6a29474a9e6a Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Wed, 18 Dec 2024 13:46:02 +0530 Subject: [PATCH 152/244] Fix strawberry integration (#3874) --- sentry_sdk/integrations/strawberry.py | 69 ++++++++----------- .../strawberry/test_strawberry.py | 4 +- 2 files changed, 32 insertions(+), 41 deletions(-) diff --git a/sentry_sdk/integrations/strawberry.py b/sentry_sdk/integrations/strawberry.py index bf174e9d99..dc96676538 100644 --- a/sentry_sdk/integrations/strawberry.py +++ b/sentry_sdk/integrations/strawberry.py @@ -179,59 +179,50 @@ def on_operation(self): }, ) - span = sentry_sdk.get_current_span() - if span: - self.graphql_span = span.start_child( - op=op, - name=description, - origin=StrawberryIntegration.origin, - ) - else: - self.graphql_span = sentry_sdk.start_span( - op=op, - name=description, - origin=StrawberryIntegration.origin, - only_if_parent=True, - ) + with sentry_sdk.start_span( + op=op, + name=description, + origin=StrawberryIntegration.origin, + only_if_parent=True, + ) as graphql_span: + graphql_span.set_data("graphql.operation.type", operation_type) + graphql_span.set_data("graphql.document", self.execution_context.query) + graphql_span.set_data("graphql.resource_name", self._resource_name) + + yield - self.graphql_span.set_data("graphql.operation.type", operation_type) - self.graphql_span.set_data("graphql.operation.name", self._operation_name) - self.graphql_span.set_data("graphql.document", self.execution_context.query) - self.graphql_span.set_data("graphql.resource_name", self._resource_name) + # we might have a more accurate operation_name after the parsing + self._operation_name = self.execution_context.operation_name - yield + if self._operation_name is not None: + graphql_span.set_data("graphql.operation.name", self._operation_name) - transaction = self.graphql_span.containing_transaction - if transaction and self.execution_context.operation_name: - transaction.name = self.execution_context.operation_name - transaction.source = TRANSACTION_SOURCE_COMPONENT - transaction.op = op + sentry_sdk.get_current_scope().set_transaction_name( + self._operation_name, + source=TRANSACTION_SOURCE_COMPONENT, + ) - self.graphql_span.finish() + root_span = graphql_span.root_span + if root_span: + root_span.op = op def on_validate(self): # type: () -> Generator[None, None, None] - self.validation_span = self.graphql_span.start_child( + with sentry_sdk.start_span( op=OP.GRAPHQL_VALIDATE, name="validation", origin=StrawberryIntegration.origin, - ) - - yield - - self.validation_span.finish() + ): + yield def 
on_parse(self): # type: () -> Generator[None, None, None] - self.parsing_span = self.graphql_span.start_child( + with sentry_sdk.start_span( op=OP.GRAPHQL_PARSE, name="parsing", origin=StrawberryIntegration.origin, - ) - - yield - - self.parsing_span.finish() + ): + yield def should_skip_tracing(self, _next, info): # type: (Callable[[Any, GraphQLResolveInfo, Any, Any], Any], GraphQLResolveInfo) -> bool @@ -253,7 +244,7 @@ async def resolve(self, _next, root, info, *args, **kwargs): field_path = "{}.{}".format(info.parent_type, info.field_name) - with self.graphql_span.start_child( + with sentry_sdk.start_span( op=OP.GRAPHQL_RESOLVE, name="resolving {}".format(field_path), origin=StrawberryIntegration.origin, @@ -274,7 +265,7 @@ def resolve(self, _next, root, info, *args, **kwargs): field_path = "{}.{}".format(info.parent_type, info.field_name) - with self.graphql_span.start_child( + with sentry_sdk.start_span( op=OP.GRAPHQL_RESOLVE, name="resolving {}".format(field_path), origin=StrawberryIntegration.origin, diff --git a/tests/integrations/strawberry/test_strawberry.py b/tests/integrations/strawberry/test_strawberry.py index 7b40b238d2..fdf7ff71bb 100644 --- a/tests/integrations/strawberry/test_strawberry.py +++ b/tests/integrations/strawberry/test_strawberry.py @@ -505,7 +505,7 @@ def test_transaction_no_operation_name( query_span = query_spans[0] assert query_span["description"] == "query" assert query_span["data"]["graphql.operation.type"] == "query" - assert query_span["data"]["graphql.operation.name"] is None + assert "graphql.operation.name" not in query_span["data"] assert query_span["data"]["graphql.document"] == query assert query_span["data"]["graphql.resource_name"] @@ -582,7 +582,7 @@ def test_transaction_mutation( query_span = query_spans[0] assert query_span["description"] == "mutation" assert query_span["data"]["graphql.operation.type"] == "mutation" - assert query_span["data"]["graphql.operation.name"] is None + assert query_span["data"]["graphql.operation.name"] == "Change" assert query_span["data"]["graphql.document"] == query assert query_span["data"]["graphql.resource_name"] From 60d6333efaf1332e653ef653195907fb2d211a43 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Wed, 18 Dec 2024 09:30:17 +0100 Subject: [PATCH 153/244] Fix arq tests in POTel (#3875) Make sure OK status is set, only when there has not been a error status set before. 
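A minimal sketch of the guard this adds, using the names from the diff below (the new `Span.status` property returns `None` until a status has been set on the underlying OTel span):

    # only mark the job's span OK if nothing set an error status while it ran
    if span.status is None:
        span.set_status(SPANSTATUS.OK)
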
--- sentry_sdk/integrations/arq.py | 7 +++- sentry_sdk/tracing.py | 19 +++++++++++ .../integrations/opentelemetry/test_potel.py | 33 +++++++++++++++++++ 3 files changed, 58 insertions(+), 1 deletion(-) diff --git a/sentry_sdk/integrations/arq.py b/sentry_sdk/integrations/arq.py index 5aa0ba7302..c26db4520c 100644 --- a/sentry_sdk/integrations/arq.py +++ b/sentry_sdk/integrations/arq.py @@ -1,5 +1,7 @@ import sys +from opentelemetry.trace.status import StatusCode + import sentry_sdk from sentry_sdk.consts import OP, SPANSTATUS from sentry_sdk.integrations import DidNotEnable, Integration @@ -116,7 +118,10 @@ async def _sentry_run_job(self, job_id, score): origin=ArqIntegration.origin, ) as span: return_value = await old_run_job(self, job_id, score) - span.set_status(SPANSTATUS.OK) + + if span.status is None: + span.set_status(SPANSTATUS.OK) + return return_value Worker.run_job = _sentry_run_job diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 7686dcf052..a0b9439dc8 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -1583,6 +1583,25 @@ def set_attribute(self, key, value): self._otel_span.set_attribute(key, _serialize_span_attribute(value)) + @property + def status(self): + # type: () -> Optional[str] + """ + Return the Sentry `SPANSTATUS` corresponding to the underlying OTel status. + Because differences in possible values in OTel `StatusCode` and + Sentry `SPANSTATUS` it can not be guaranteed that the status + set in `set_status()` will be the same as the one returned here. + """ + if not hasattr(self._otel_span, "status"): + return None + + if self._otel_span.status.status_code == StatusCode.UNSET: + return None + elif self._otel_span.status.status_code == StatusCode.OK: + return SPANSTATUS.OK + else: + return SPANSTATUS.UNKNOWN_ERROR + def set_status(self, status): # type: (str) -> None if status == SPANSTATUS.OK: diff --git a/tests/integrations/opentelemetry/test_potel.py b/tests/integrations/opentelemetry/test_potel.py index 39c48f8cc8..2d1d66c6d0 100644 --- a/tests/integrations/opentelemetry/test_potel.py +++ b/tests/integrations/opentelemetry/test_potel.py @@ -2,6 +2,7 @@ from opentelemetry import trace import sentry_sdk +from sentry_sdk.consts import SPANSTATUS from tests.conftest import ApproxDict @@ -331,3 +332,35 @@ def test_potel_span_root_span_references(): with sentry_sdk.start_span(description="http") as http_span: assert not http_span.is_root_span assert http_span.root_span == request_span + + +@pytest.mark.parametrize( + "status_in,status_out", + [ + (None, None), + ("", SPANSTATUS.UNKNOWN_ERROR), + (SPANSTATUS.OK, SPANSTATUS.OK), + (SPANSTATUS.ABORTED, SPANSTATUS.UNKNOWN_ERROR), + (SPANSTATUS.ALREADY_EXISTS, SPANSTATUS.UNKNOWN_ERROR), + (SPANSTATUS.CANCELLED, SPANSTATUS.UNKNOWN_ERROR), + (SPANSTATUS.DATA_LOSS, SPANSTATUS.UNKNOWN_ERROR), + (SPANSTATUS.DEADLINE_EXCEEDED, SPANSTATUS.UNKNOWN_ERROR), + (SPANSTATUS.FAILED_PRECONDITION, SPANSTATUS.UNKNOWN_ERROR), + (SPANSTATUS.INTERNAL_ERROR, SPANSTATUS.UNKNOWN_ERROR), + (SPANSTATUS.INVALID_ARGUMENT, SPANSTATUS.UNKNOWN_ERROR), + (SPANSTATUS.NOT_FOUND, SPANSTATUS.UNKNOWN_ERROR), + (SPANSTATUS.OUT_OF_RANGE, SPANSTATUS.UNKNOWN_ERROR), + (SPANSTATUS.PERMISSION_DENIED, SPANSTATUS.UNKNOWN_ERROR), + (SPANSTATUS.RESOURCE_EXHAUSTED, SPANSTATUS.UNKNOWN_ERROR), + (SPANSTATUS.UNAUTHENTICATED, SPANSTATUS.UNKNOWN_ERROR), + (SPANSTATUS.UNAVAILABLE, SPANSTATUS.UNKNOWN_ERROR), + (SPANSTATUS.UNIMPLEMENTED, SPANSTATUS.UNKNOWN_ERROR), + (SPANSTATUS.UNKNOWN_ERROR, SPANSTATUS.UNKNOWN_ERROR), + ], +) +def 
test_potel_span_status(status_in, status_out): + span = sentry_sdk.start_span(name="test") + if status_in is not None: + span.set_status(status_in) + + assert span.status == status_out From f6e91621ae772fb28b1b29df4df47236f3959c2d Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Wed, 18 Dec 2024 10:11:09 +0100 Subject: [PATCH 154/244] Fix ray tests (#3877) Make sure there is a transaction name --- sentry_sdk/integrations/ray.py | 17 +++++++--- tests/integrations/ray/test_ray.py | 50 ++++++++++++++++-------------- 2 files changed, 39 insertions(+), 28 deletions(-) diff --git a/sentry_sdk/integrations/ray.py b/sentry_sdk/integrations/ray.py index 0290bdf1ef..0503d27bdb 100644 --- a/sentry_sdk/integrations/ray.py +++ b/sentry_sdk/integrations/ray.py @@ -26,6 +26,8 @@ from typing import Any, Optional from sentry_sdk.utils import ExcInfo +DEFAULT_TRANSACTION_NAME = "unknown Ray function" + def _check_sentry_initialized(): # type: () -> None @@ -58,18 +60,23 @@ def _f(*f_args, _tracing=None, **f_kwargs): """ _check_sentry_initialized() + root_span_name = qualname_from_function(f) or DEFAULT_TRANSACTION_NAME + sentry_sdk.get_current_scope().set_transaction_name( + root_span_name, + source=TRANSACTION_SOURCE_TASK, + ) with sentry_sdk.continue_trace(_tracing or {}): - with sentry_sdk.start_transaction( + with sentry_sdk.start_span( op=OP.QUEUE_TASK_RAY, - name=qualname_from_function(f) or "unknown Ray function", + name=root_span_name, origin=RayIntegration.origin, source=TRANSACTION_SOURCE_TASK, - ) as transaction: + ) as root_span: try: result = f(*f_args, **f_kwargs) - transaction.set_status(SPANSTATUS.OK) + root_span.set_status(SPANSTATUS.OK) except Exception: - transaction.set_status(SPANSTATUS.INTERNAL_ERROR) + root_span.set_status(SPANSTATUS.INTERNAL_ERROR) exc_info = sys.exc_info() _capture_exception(exc_info) reraise(*exc_info) diff --git a/tests/integrations/ray/test_ray.py b/tests/integrations/ray/test_ray.py index 95ab4ad0fa..a8c752269a 100644 --- a/tests/integrations/ray/test_ray.py +++ b/tests/integrations/ray/test_ray.py @@ -77,42 +77,42 @@ def example_task(): return sentry_sdk.get_client().transport.envelopes - with sentry_sdk.start_transaction(op="task", name="ray test transaction"): + with sentry_sdk.start_span(op="test", name="ray client root span"): worker_envelopes = ray.get(example_task.remote()) client_envelope = sentry_sdk.get_client().transport.envelopes[0] - client_transaction = client_envelope.get_transaction_event() - assert client_transaction["transaction"] == "ray test transaction" - assert client_transaction["transaction_info"] == {"source": "custom"} + client_root_span = client_envelope.get_transaction_event() + assert client_root_span["transaction"] == "ray client root span" + assert client_root_span["transaction_info"] == {"source": "custom"} worker_envelope = worker_envelopes[0] - worker_transaction = worker_envelope.get_transaction_event() + worker_root_span = worker_envelope.get_transaction_event() assert ( - worker_transaction["transaction"] + worker_root_span["transaction"] == "tests.integrations.ray.test_ray.test_tracing_in_ray_tasks..example_task" ) - assert worker_transaction["transaction_info"] == {"source": "task"} + assert worker_root_span["transaction_info"] == {"source": "task"} - (span,) = client_transaction["spans"] + (span,) = client_root_span["spans"] assert span["op"] == "queue.submit.ray" assert span["origin"] == "auto.queue.ray" assert ( span["description"] == "tests.integrations.ray.test_ray.test_tracing_in_ray_tasks..example_task" ) - assert 
span["parent_span_id"] == client_transaction["contexts"]["trace"]["span_id"] - assert span["trace_id"] == client_transaction["contexts"]["trace"]["trace_id"] + assert span["parent_span_id"] == client_root_span["contexts"]["trace"]["span_id"] + assert span["trace_id"] == client_root_span["contexts"]["trace"]["trace_id"] - (span,) = worker_transaction["spans"] + (span,) = worker_root_span["spans"] assert span["op"] == "task" assert span["origin"] == "manual" assert span["description"] == "example task step" - assert span["parent_span_id"] == worker_transaction["contexts"]["trace"]["span_id"] - assert span["trace_id"] == worker_transaction["contexts"]["trace"]["trace_id"] + assert span["parent_span_id"] == worker_root_span["contexts"]["trace"]["span_id"] + assert span["trace_id"] == worker_root_span["contexts"]["trace"]["trace_id"] assert ( - client_transaction["contexts"]["trace"]["trace_id"] - == worker_transaction["contexts"]["trace"]["trace_id"] + client_root_span["contexts"]["trace"]["trace_id"] + == worker_root_span["contexts"]["trace"]["trace_id"] ) @@ -132,7 +132,7 @@ def test_errors_in_ray_tasks(): def example_task(): 1 / 0 - with sentry_sdk.start_transaction(op="task", name="ray test transaction"): + with sentry_sdk.start_span(op="test", name="ray client root span"): with pytest.raises(ZeroDivisionError): future = example_task.remote() ray.get(future) @@ -167,22 +167,24 @@ def __init__(self): self.n = 0 def increment(self): - with sentry_sdk.start_span(op="task", name="example actor execution"): + with sentry_sdk.start_span( + op="test", name="custom span in actor execution", only_if_parent=True + ): self.n += 1 return sentry_sdk.get_client().transport.envelopes - with sentry_sdk.start_transaction(op="task", name="ray test transaction"): + with sentry_sdk.start_span(op="test", name="ray client root span"): counter = Counter.remote() worker_envelopes = ray.get(counter.increment.remote()) client_envelope = sentry_sdk.get_client().transport.envelopes[0] - client_transaction = client_envelope.get_transaction_event() + client_root_span = client_envelope.get_transaction_event() # Spans for submitting the actor task are not created (actors are not supported yet) - assert client_transaction["spans"] == [] + assert client_root_span["spans"] == [] - # Transaction are not yet created when executing ray actors (actors are not supported yet) + # Root spans are not yet automatically created when executing ray actors (actors are not supported yet) assert worker_envelopes == [] @@ -204,12 +206,14 @@ def __init__(self): self.n = 0 def increment(self): - with sentry_sdk.start_span(op="task", name="example actor execution"): + with sentry_sdk.start_span( + op="test", name="custom span in actor execution", only_if_parent=True + ): 1 / 0 return sentry_sdk.get_client().transport.envelopes - with sentry_sdk.start_transaction(op="task", name="ray test transaction"): + with sentry_sdk.start_span(op="test", name="ray client root span"): with pytest.raises(ZeroDivisionError): counter = Counter.remote() future = counter.increment.remote() From 7fca789025ecc6980d537502027f417c55d5c8c2 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Wed, 18 Dec 2024 17:30:45 +0530 Subject: [PATCH 155/244] Ignore opentelemetry logger in logging integration (#3880) Without this, internal otel logs (especially `logger.exception`s) will show up as events / breadcrumbs in the payload. 
--- sentry_sdk/integrations/logging.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sentry_sdk/integrations/logging.py b/sentry_sdk/integrations/logging.py index b792510d6c..314780cabd 100644 --- a/sentry_sdk/integrations/logging.py +++ b/sentry_sdk/integrations/logging.py @@ -40,7 +40,7 @@ # Note: Ignoring by logger name here is better than mucking with thread-locals. # We do not necessarily know whether thread-locals work 100% correctly in the user's environment. _IGNORED_LOGGERS = set( - ["sentry_sdk.errors", "urllib3.connectionpool", "urllib3.connection"] + ["sentry_sdk.errors", "urllib3.connectionpool", "urllib3.connection", "opentelemetry.*"] ) From 858cacdcfe9d702c9f7ef1ecde83b098c91b2891 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Wed, 18 Dec 2024 20:41:55 +0530 Subject: [PATCH 156/244] Implement span limits on span processor (#3881) --- sentry_sdk/integrations/arq.py | 2 -- sentry_sdk/integrations/logging.py | 7 ++++++- .../opentelemetry/span_processor.py | 19 ++++++++++++++++++- tests/tracing/test_misc.py | 19 +++++++++++++++++++ 4 files changed, 43 insertions(+), 4 deletions(-) diff --git a/sentry_sdk/integrations/arq.py b/sentry_sdk/integrations/arq.py index c26db4520c..0f42050cf2 100644 --- a/sentry_sdk/integrations/arq.py +++ b/sentry_sdk/integrations/arq.py @@ -1,7 +1,5 @@ import sys -from opentelemetry.trace.status import StatusCode - import sentry_sdk from sentry_sdk.consts import OP, SPANSTATUS from sentry_sdk.integrations import DidNotEnable, Integration diff --git a/sentry_sdk/integrations/logging.py b/sentry_sdk/integrations/logging.py index 314780cabd..52c56a8e60 100644 --- a/sentry_sdk/integrations/logging.py +++ b/sentry_sdk/integrations/logging.py @@ -40,7 +40,12 @@ # Note: Ignoring by logger name here is better than mucking with thread-locals. # We do not necessarily know whether thread-locals work 100% correctly in the user's environment. _IGNORED_LOGGERS = set( - ["sentry_sdk.errors", "urllib3.connectionpool", "urllib3.connection", "opentelemetry.*"] + [ + "sentry_sdk.errors", + "urllib3.connectionpool", + "urllib3.connection", + "opentelemetry.*", + ] ) diff --git a/sentry_sdk/integrations/opentelemetry/span_processor.py b/sentry_sdk/integrations/opentelemetry/span_processor.py index 0b4c3387df..42ad32a5ea 100644 --- a/sentry_sdk/integrations/opentelemetry/span_processor.py +++ b/sentry_sdk/integrations/opentelemetry/span_processor.py @@ -43,6 +43,9 @@ from sentry_sdk._types import Event +DEFAULT_MAX_SPANS = 1000 + + class SentrySpanProcessor(SpanProcessor): """ Converts OTel spans into Sentry spans so they can be sent to the Sentry backend. 
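# A minimal usage sketch for the cap wired up in this file, assuming only the
# experimental "max_spans" option exercised in the tests further down in this
# patch: child spans beyond the configured limit (DEFAULT_MAX_SPANS = 1000 by
# default) are not appended by _append_child_span and so never reach the
# flushed transaction event.
import sentry_sdk

sentry_sdk.init(traces_sample_rate=1.0, _experiments={"max_spans": 2000})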
@@ -79,7 +82,7 @@ def on_end(self, span): # if have a root span ending, we build a transaction and send it self._flush_root_span(span) else: - self._children_spans[span.parent.span_id].append(span) + self._append_child_span(span) # TODO-neel-potel not sure we need a clear like JS def shutdown(self): @@ -150,6 +153,20 @@ def _flush_root_span(self, span): sentry_sdk.capture_event(transaction_event) + def _append_child_span(self, span): + # type: (ReadableSpan) -> None + if not span.parent: + return + + max_spans = ( + sentry_sdk.get_client().options["_experiments"].get("max_spans") + or DEFAULT_MAX_SPANS + ) + + children_spans = self._children_spans[span.parent.span_id] + if len(children_spans) < max_spans: + children_spans.append(span) + def _collect_children(self, span): # type: (ReadableSpan) -> List[ReadableSpan] if not span.context: diff --git a/tests/tracing/test_misc.py b/tests/tracing/test_misc.py index 0d12acc617..dc5754cfd2 100644 --- a/tests/tracing/test_misc.py +++ b/tests/tracing/test_misc.py @@ -8,6 +8,25 @@ from sentry_sdk.utils import Dsn +def test_span_trimming(sentry_init, capture_events): + sentry_init(traces_sample_rate=1.0, _experiments={"max_spans": 3}) + events = capture_events() + + with start_span(name="hi"): + for i in range(10): + with start_span(op="foo{}".format(i)): + pass + + (event,) = events + + assert len(event["spans"]) == 3 + + span1, span2, span3 = event["spans"] + assert span1["op"] == "foo0" + assert span2["op"] == "foo1" + assert span3["op"] == "foo2" + + def test_transaction_naming(sentry_init, capture_events): sentry_init(traces_sample_rate=1.0) events = capture_events() From 9e64b1d4d123a84b91fab66bc667390752e0e492 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Fri, 20 Dec 2024 14:05:46 +0100 Subject: [PATCH 157/244] Fixed OpenAI tests (#3738) --- tests/integrations/openai/test_openai.py | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/tests/integrations/openai/test_openai.py b/tests/integrations/openai/test_openai.py index 011192e49f..1d5ce7a2b1 100644 --- a/tests/integrations/openai/test_openai.py +++ b/tests/integrations/openai/test_openai.py @@ -83,8 +83,8 @@ def test_nonstreaming_chat_completion( assert span["op"] == "ai.chat_completions.create.openai" if send_default_pii and include_prompts: - assert "hello" in span["data"]["ai.input_messages"]["content"] - assert "the model response" in span["data"]["ai.responses"]["content"] + assert '"content": "hello"' in span["data"]["ai.input_messages"] + assert '"content": "the model response"' in span["data"]["ai.responses"] else: assert "ai.input_messages" not in span["data"] assert "ai.responses" not in span["data"] @@ -125,8 +125,8 @@ async def test_nonstreaming_chat_completion_async( assert span["op"] == "ai.chat_completions.create.openai" if send_default_pii and include_prompts: - assert "hello" in span["data"]["ai.input_messages"]["content"] - assert "the model response" in span["data"]["ai.responses"]["content"] + assert '"content": "hello"' in span["data"]["ai.input_messages"] + assert '"content": "the model response"' in span["data"]["ai.responses"] else: assert "ai.input_messages" not in span["data"] assert "ai.responses" not in span["data"] @@ -218,7 +218,7 @@ def test_streaming_chat_completion( assert span["op"] == "ai.chat_completions.create.openai" if send_default_pii and include_prompts: - assert "hello" in span["data"]["ai.input_messages"]["content"] + assert '"content": "hello"' in span["data"]["ai.input_messages"] assert "hello world" in 
span["data"]["ai.responses"] else: assert "ai.input_messages" not in span["data"] @@ -314,7 +314,7 @@ async def test_streaming_chat_completion_async( assert span["op"] == "ai.chat_completions.create.openai" if send_default_pii and include_prompts: - assert "hello" in span["data"]["ai.input_messages"]["content"] + assert '"content": "hello"' in span["data"]["ai.input_messages"] assert "hello world" in span["data"]["ai.responses"] else: assert "ai.input_messages" not in span["data"] @@ -330,6 +330,7 @@ async def test_streaming_chat_completion_async( pass # if tiktoken is not installed, we can't guarantee token usage will be calculated properly +@pytest.mark.forked def test_bad_chat_completion(sentry_init, capture_events): sentry_init(integrations=[OpenAIIntegration()], traces_sample_rate=1.0) events = capture_events() @@ -460,6 +461,7 @@ async def test_embeddings_create_async( assert span["measurements"]["ai_total_tokens_used"]["value"] == 30 +@pytest.mark.forked @pytest.mark.parametrize( "send_default_pii, include_prompts", [(True, True), (True, False), (False, True), (False, False)], @@ -487,6 +489,7 @@ def test_embeddings_create_raises_error( assert event["level"] == "error" +@pytest.mark.forked @pytest.mark.asyncio @pytest.mark.parametrize( "send_default_pii, include_prompts", From e5558a6549630be1e7dd4f36e0bf0942d49287c3 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Thu, 9 Jan 2025 14:59:23 +0100 Subject: [PATCH 158/244] Cleanup start_transaction usages (#3884) --- sentry_sdk/integrations/aws_lambda.py | 2 +- sentry_sdk/integrations/celery/__init__.py | 2 +- sentry_sdk/integrations/gcp.py | 2 +- sentry_sdk/integrations/grpc/aio/server.py | 2 +- sentry_sdk/integrations/grpc/server.py | 2 +- sentry_sdk/integrations/tornado.py | 2 +- sentry_sdk/utils.py | 2 +- tests/integrations/aiohttp/test_aiohttp.py | 10 +++---- .../integrations/anthropic/test_anthropic.py | 18 ++++++------ tests/integrations/arq/test_arq.py | 6 ++-- tests/integrations/celery/test_celery.py | 28 +++++++++---------- .../celery/test_update_celery_task_headers.py | 4 +-- .../test_clickhouse_driver.py | 12 ++++---- tests/integrations/cohere/test_cohere.py | 12 ++++---- .../integrations/django/test_cache_module.py | 4 +-- .../integrations/django/test_db_query_data.py | 4 +-- .../huggingface_hub/test_huggingface_hub.py | 8 +++--- .../integrations/langchain/test_langchain.py | 8 +++--- tests/integrations/openai/test_openai.py | 26 ++++++++--------- .../redis/test_redis_cache_module.py | 10 +++---- .../test_redis_py_cluster_legacy.py | 8 +++--- tests/integrations/socket/test_socket.py | 8 +++--- tests/integrations/tornado/test_tornado.py | 6 ++-- tests/profiler/test_continuous_profiler.py | 10 +++---- tests/profiler/test_transaction_profiler.py | 10 +++---- tests/test_ai_monitoring.py | 8 +++--- tests/test_basics.py | 14 +++++----- tests/test_scope.py | 4 +-- tests/test_scrubber.py | 4 +-- 29 files changed, 118 insertions(+), 118 deletions(-) diff --git a/sentry_sdk/integrations/aws_lambda.py b/sentry_sdk/integrations/aws_lambda.py index cd2b3cc417..8899cc53b2 100644 --- a/sentry_sdk/integrations/aws_lambda.py +++ b/sentry_sdk/integrations/aws_lambda.py @@ -160,7 +160,7 @@ def sentry_handler(aws_event, aws_context, *args, **kwargs): headers = {} with sentry_sdk.continue_trace(headers): - with sentry_sdk.start_transaction( + with sentry_sdk.start_span( op=OP.FUNCTION_AWS, name=aws_context.function_name, source=TRANSACTION_SOURCE_COMPONENT, diff --git a/sentry_sdk/integrations/celery/__init__.py 
b/sentry_sdk/integrations/celery/__init__.py index a943871335..43321ec89b 100644 --- a/sentry_sdk/integrations/celery/__init__.py +++ b/sentry_sdk/integrations/celery/__init__.py @@ -313,7 +313,7 @@ def _inner(*args, **kwargs): # something such as attribute access can fail. headers = args[3].get("headers") or {} with sentry_sdk.continue_trace(headers): - with sentry_sdk.start_transaction( + with sentry_sdk.start_span( op=OP.QUEUE_TASK_CELERY, name=task.name, source=TRANSACTION_SOURCE_TASK, diff --git a/sentry_sdk/integrations/gcp.py b/sentry_sdk/integrations/gcp.py index dd23ad1e0a..ec626ed699 100644 --- a/sentry_sdk/integrations/gcp.py +++ b/sentry_sdk/integrations/gcp.py @@ -87,7 +87,7 @@ def sentry_func(functionhandler, gcp_event, *args, **kwargs): headers = gcp_event.headers with sentry_sdk.continue_trace(headers): - with sentry_sdk.start_transaction( + with sentry_sdk.start_span( op=OP.FUNCTION_GCP, name=environ.get("FUNCTION_NAME", ""), source=TRANSACTION_SOURCE_COMPONENT, diff --git a/sentry_sdk/integrations/grpc/aio/server.py b/sentry_sdk/integrations/grpc/aio/server.py index 6d38e91363..4d54b0605c 100644 --- a/sentry_sdk/integrations/grpc/aio/server.py +++ b/sentry_sdk/integrations/grpc/aio/server.py @@ -45,7 +45,7 @@ async def wrapped(request, context): # What if the headers are empty? with sentry_sdk.continue_trace(dict(context.invocation_metadata())): - with sentry_sdk.start_transaction( + with sentry_sdk.start_span( op=OP.GRPC_SERVER, name=name, source=TRANSACTION_SOURCE_CUSTOM, diff --git a/sentry_sdk/integrations/grpc/server.py b/sentry_sdk/integrations/grpc/server.py index fb123c5ca4..d12b43b92b 100644 --- a/sentry_sdk/integrations/grpc/server.py +++ b/sentry_sdk/integrations/grpc/server.py @@ -39,7 +39,7 @@ def behavior(request, context): metadata = dict(context.invocation_metadata()) with sentry_sdk.continue_trace(metadata): - with sentry_sdk.start_transaction( + with sentry_sdk.start_span( op=OP.GRPC_SERVER, name=name, source=TRANSACTION_SOURCE_CUSTOM, diff --git a/sentry_sdk/integrations/tornado.py b/sentry_sdk/integrations/tornado.py index bb40fbf625..1af8950aa4 100644 --- a/sentry_sdk/integrations/tornado.py +++ b/sentry_sdk/integrations/tornado.py @@ -125,7 +125,7 @@ def _handle_request_impl(self): scope.add_event_processor(processor) with sentry_sdk.continue_trace(headers): - with sentry_sdk.start_transaction( + with sentry_sdk.start_span( op=OP.HTTP_SERVER, # Like with all other integrations, this is our # fallback transaction in case there is no route. 
diff --git a/sentry_sdk/utils.py b/sentry_sdk/utils.py index 16dfd9c4fa..403ac84220 100644 --- a/sentry_sdk/utils.py +++ b/sentry_sdk/utils.py @@ -1802,7 +1802,7 @@ def ensure_integration_enabled( ```python @ensure_integration_enabled(MyIntegration, my_function) def patch_my_function(): - with sentry_sdk.start_transaction(...): + with sentry_sdk.start_span(...): return my_function() ``` """ diff --git a/tests/integrations/aiohttp/test_aiohttp.py b/tests/integrations/aiohttp/test_aiohttp.py index 4b491b152e..7960bc2dcc 100644 --- a/tests/integrations/aiohttp/test_aiohttp.py +++ b/tests/integrations/aiohttp/test_aiohttp.py @@ -15,7 +15,7 @@ HTTPUnavailableForLegalReasons, ) -from sentry_sdk import capture_message, start_transaction +from sentry_sdk import capture_message, start_span from sentry_sdk.integrations.aiohttp import AioHttpIntegration from tests.conftest import ApproxDict @@ -417,7 +417,7 @@ async def hello(request): # The aiohttp_client is instrumented so will generate the sentry-trace header and add request. # Get the sentry-trace header from the request so we can later compare with transaction events. client = await aiohttp_client(app) - with start_transaction(): + with start_span(name="request"): # Headers are only added to the span if there is an active transaction resp = await client.get("/") @@ -496,7 +496,7 @@ async def handler(request): raw_server = await aiohttp_raw_server(handler) - with start_transaction(): + with start_span(name="breadcrumb"): events = capture_events() client = await aiohttp_client(raw_server) @@ -538,7 +538,7 @@ async def handler(request): raw_server = await aiohttp_raw_server(handler) - with start_transaction( + with start_span( name="/interactions/other-dogs/new-dog", op="greeting.sniff", ) as transaction: @@ -573,7 +573,7 @@ async def handler(request): raw_server = await aiohttp_raw_server(handler) - with start_transaction( + with start_span( name="/interactions/other-dogs/new-dog", op="greeting.sniff", ) as transaction: diff --git a/tests/integrations/anthropic/test_anthropic.py b/tests/integrations/anthropic/test_anthropic.py index 4a7d7ed458..ece2cfe7a3 100644 --- a/tests/integrations/anthropic/test_anthropic.py +++ b/tests/integrations/anthropic/test_anthropic.py @@ -42,7 +42,7 @@ async def __call__(self, *args, **kwargs): except ImportError: from anthropic.types.content_block import ContentBlock as TextBlock -from sentry_sdk import start_transaction +from sentry_sdk import start_span from sentry_sdk.consts import OP, SPANDATA from sentry_sdk.integrations.anthropic import AnthropicIntegration @@ -90,7 +90,7 @@ def test_nonstreaming_create_message( } ] - with start_transaction(name="anthropic"): + with start_span(name="anthropic"): response = client.messages.create( max_tokens=1024, messages=messages, model="model" ) @@ -160,7 +160,7 @@ async def test_nonstreaming_create_message_async( } ] - with start_transaction(name="anthropic"): + with start_span(name="anthropic"): response = await client.messages.create( max_tokens=1024, messages=messages, model="model" ) @@ -263,7 +263,7 @@ def test_streaming_create_message( } ] - with start_transaction(name="anthropic"): + with start_span(name="anthropic"): message = client.messages.create( max_tokens=1024, messages=messages, model="model", stream=True ) @@ -368,7 +368,7 @@ async def test_streaming_create_message_async( } ] - with start_transaction(name="anthropic"): + with start_span(name="anthropic"): message = await client.messages.create( max_tokens=1024, messages=messages, model="model", stream=True 
) @@ -500,7 +500,7 @@ def test_streaming_create_message_with_input_json_delta( } ] - with start_transaction(name="anthropic"): + with start_span(name="anthropic"): message = client.messages.create( max_tokens=1024, messages=messages, model="model", stream=True ) @@ -639,7 +639,7 @@ async def test_streaming_create_message_with_input_json_delta_async( } ] - with start_transaction(name="anthropic"): + with start_span(name="anthropic"): message = await client.messages.create( max_tokens=1024, messages=messages, model="model", stream=True ) @@ -736,7 +736,7 @@ def test_span_origin(sentry_init, capture_events): } ] - with start_transaction(name="anthropic"): + with start_span(name="anthropic"): client.messages.create(max_tokens=1024, messages=messages, model="model") (event,) = events @@ -763,7 +763,7 @@ async def test_span_origin_async(sentry_init, capture_events): } ] - with start_transaction(name="anthropic"): + with start_span(name="anthropic"): await client.messages.create(max_tokens=1024, messages=messages, model="model") (event,) = events diff --git a/tests/integrations/arq/test_arq.py b/tests/integrations/arq/test_arq.py index e74395e26c..0ebb97cd2b 100644 --- a/tests/integrations/arq/test_arq.py +++ b/tests/integrations/arq/test_arq.py @@ -1,7 +1,7 @@ import asyncio import pytest -from sentry_sdk import get_client, start_transaction +from sentry_sdk import get_client, start_span from sentry_sdk.integrations.arq import ArqIntegration import arq.worker @@ -292,7 +292,7 @@ async def dummy_job(_): events = capture_events() - with start_transaction() as transaction: + with start_span(name="test") as transaction: await pool.enqueue_job("dummy_job") (event,) = events @@ -343,7 +343,7 @@ async def dummy_job(_): events = capture_events() - with start_transaction(): + with start_span(name="job"): await pool.enqueue_job("dummy_job") (event,) = events diff --git a/tests/integrations/celery/test_celery.py b/tests/integrations/celery/test_celery.py index 1011429098..2f8de6968a 100644 --- a/tests/integrations/celery/test_celery.py +++ b/tests/integrations/celery/test_celery.py @@ -7,7 +7,7 @@ from celery.bin import worker import sentry_sdk -from sentry_sdk import start_transaction, get_current_span +from sentry_sdk import start_span, get_current_span from sentry_sdk.integrations.celery import ( CeleryIntegration, _wrap_task_run, @@ -126,7 +126,7 @@ def dummy_task(x, y): foo = 42 # noqa return x / y - with start_transaction(op="unit test transaction") as transaction: + with start_span(op="unit test transaction") as transaction: celery_invocation(dummy_task, 1, 2) _, expected_context = celery_invocation(dummy_task, 1, 0) @@ -195,7 +195,7 @@ def dummy_task(x, y): events = capture_events() - with start_transaction(name="submission") as transaction: + with start_span(name="submission") as transaction: celery_invocation(dummy_task, 1, 0 if task_fails else 1) if task_fails: @@ -275,11 +275,11 @@ def test_simple_no_propagation(capture_events, init_celery): def dummy_task(): 1 / 0 - with start_transaction() as transaction: + with start_span(name="task") as root_span: dummy_task.delay() (event,) = events - assert event["contexts"]["trace"]["trace_id"] != transaction.trace_id + assert event["contexts"]["trace"]["trace_id"] != root_span.trace_id assert event["transaction"] == "dummy_task" (exception,) = event["exception"]["values"] assert exception["type"] == "ZeroDivisionError" @@ -350,7 +350,7 @@ def dummy_task(self): runs.append(1) 1 / 0 - with start_transaction(name="submit_celery"): + with 
start_span(name="submit_celery"): # Curious: Cannot use delay() here or py2.7-celery-4.2 crashes res = dummy_task.apply_async() @@ -469,7 +469,7 @@ def __call__(self, *args, **kwargs): def dummy_task(x, y): return x / y - with start_transaction(): + with start_span(name="celery"): celery_invocation(dummy_task, 1, 0) assert not events @@ -510,7 +510,7 @@ def test_baggage_propagation(init_celery): def dummy_task(self, x, y): return _get_headers(self) - with start_transaction() as transaction: + with start_span(name="task") as root_span: result = dummy_task.apply_async( args=(1, 0), headers={"baggage": "custom=value"}, @@ -519,7 +519,7 @@ def dummy_task(self, x, y): assert sorted(result["baggage"].split(",")) == sorted( [ "sentry-release=abcdef", - "sentry-trace_id={}".format(transaction.trace_id), + "sentry-trace_id={}".format(root_span.trace_id), "sentry-environment=production", "sentry-sample_rate=1.0", "sentry-sampled=true", @@ -542,8 +542,8 @@ def dummy_task(self, message): trace_id = get_current_span().trace_id return trace_id - with start_transaction() as transaction: - transaction_trace_id = transaction.trace_id + with start_span(name="task") as root_span: + transaction_trace_id = root_span.trace_id # should propagate trace task_transaction_id = dummy_task.apply_async( @@ -710,7 +710,7 @@ def publish(*args, **kwargs): @celery.task() def task(): ... - with start_transaction(): + with start_span(name="task"): task.apply_async() (event,) = events @@ -773,7 +773,7 @@ def publish(*args, **kwargs): @celery.task() def task(): ... - with start_transaction(name="custom_transaction"): + with start_span(name="custom_transaction"): task.apply_async() (event,) = events @@ -799,7 +799,7 @@ def test_send_task_wrapped( events = capture_events() - with sentry_sdk.start_transaction(name="custom_transaction"): + with sentry_sdk.start_span(name="custom_transaction"): celery.send_task("very_creative_task_name", args=(1, 2), kwargs={"foo": "bar"}) (call,) = patched_send_task.call_args_list # We should have exactly one call diff --git a/tests/integrations/celery/test_update_celery_task_headers.py b/tests/integrations/celery/test_update_celery_task_headers.py index 705c00de58..709e49b54a 100644 --- a/tests/integrations/celery/test_update_celery_task_headers.py +++ b/tests/integrations/celery/test_update_celery_task_headers.py @@ -75,7 +75,7 @@ def test_span_with_transaction(sentry_init): headers = {} monitor_beat_tasks = False - with sentry_sdk.start_transaction(name="test_transaction") as transaction: + with sentry_sdk.start_span(name="test_transaction") as transaction: with sentry_sdk.start_span(op="test_span") as span: outgoing_headers = _update_celery_task_headers( headers, span, monitor_beat_tasks @@ -97,7 +97,7 @@ def test_span_with_transaction_custom_headers(sentry_init): "sentry-trace": SENTRY_TRACE_VALUE, } - with sentry_sdk.start_transaction(name="test_transaction") as transaction: + with sentry_sdk.start_span(name="test_transaction") as transaction: with sentry_sdk.start_span(op="test_span") as span: outgoing_headers = _update_celery_task_headers(headers, span, False) diff --git a/tests/integrations/clickhouse_driver/test_clickhouse_driver.py b/tests/integrations/clickhouse_driver/test_clickhouse_driver.py index 2c3d3c41a4..7eb0462231 100644 --- a/tests/integrations/clickhouse_driver/test_clickhouse_driver.py +++ b/tests/integrations/clickhouse_driver/test_clickhouse_driver.py @@ -8,7 +8,7 @@ import clickhouse_driver from clickhouse_driver import Client, connect -from sentry_sdk import 
start_transaction, capture_message +from sentry_sdk import start_span, capture_message from sentry_sdk.integrations.clickhouse_driver import ClickhouseDriverIntegration from tests.conftest import ApproxDict @@ -227,7 +227,7 @@ def test_clickhouse_client_spans( transaction_trace_id = None transaction_span_id = None - with start_transaction(name="test_clickhouse_transaction") as transaction: + with start_span(name="test_clickhouse_transaction") as transaction: transaction_trace_id = transaction.trace_id transaction_span_id = transaction.span_id @@ -360,7 +360,7 @@ def test_clickhouse_client_spans_with_pii(sentry_init, capture_events) -> None: transaction_trace_id = None transaction_span_id = None - with start_transaction(name="test_clickhouse_transaction") as transaction: + with start_span(name="test_clickhouse_transaction") as transaction: transaction_trace_id = transaction.trace_id transaction_span_id = transaction.span_id @@ -701,7 +701,7 @@ def test_clickhouse_dbapi_spans(sentry_init, capture_events, capture_envelopes) transaction_trace_id = None transaction_span_id = None - with start_transaction(name="test_clickhouse_transaction") as transaction: + with start_span(name="test_clickhouse_transaction") as transaction: transaction_trace_id = transaction.trace_id transaction_span_id = transaction.span_id @@ -835,7 +835,7 @@ def test_clickhouse_dbapi_spans_with_pii( transaction_trace_id = None transaction_span_id = None - with start_transaction(name="test_clickhouse_transaction") as transaction: + with start_span(name="test_clickhouse_transaction") as transaction: transaction_trace_id = transaction.trace_id transaction_span_id = transaction.span_id @@ -975,7 +975,7 @@ def test_span_origin(sentry_init, capture_events, capture_envelopes) -> None: events = capture_events() - with start_transaction(name="test_clickhouse_transaction"): + with start_span(name="test_clickhouse_transaction"): conn = connect("clickhouse://localhost") cursor = conn.cursor() cursor.execute("SELECT 1") diff --git a/tests/integrations/cohere/test_cohere.py b/tests/integrations/cohere/test_cohere.py index 672d71b6b3..ff41ceba11 100644 --- a/tests/integrations/cohere/test_cohere.py +++ b/tests/integrations/cohere/test_cohere.py @@ -4,7 +4,7 @@ import pytest from cohere import Client, ChatMessage -from sentry_sdk import start_transaction +from sentry_sdk import start_span from sentry_sdk.integrations.cohere import CohereIntegration from unittest import mock # python 3.3 and above @@ -41,7 +41,7 @@ def test_nonstreaming_chat( ) ) - with start_transaction(name="cohere tx"): + with start_span(name="cohere tx"): response = client.chat( model="some-model", chat_history=[ChatMessage(role="SYSTEM", message="some context")], @@ -110,7 +110,7 @@ def test_streaming_chat(sentry_init, capture_events, send_default_pii, include_p ) ) - with start_transaction(name="cohere tx"): + with start_span(name="cohere tx"): responses = list( client.chat_stream( model="some-model", @@ -186,7 +186,7 @@ def test_embed(sentry_init, capture_events, send_default_pii, include_prompts): ) ) - with start_transaction(name="cohere tx"): + with start_span(name="cohere tx"): response = client.embed(texts=["hello"], model="text-embedding-3-large") assert len(response.embeddings[0]) == 3 @@ -227,7 +227,7 @@ def test_span_origin_chat(sentry_init, capture_events): ) ) - with start_transaction(name="cohere tx"): + with start_span(name="cohere tx"): client.chat( model="some-model", chat_history=[ChatMessage(role="SYSTEM", message="some context")], @@ -265,7 +265,7 @@ 
def test_span_origin_embed(sentry_init, capture_events): ) ) - with start_transaction(name="cohere tx"): + with start_span(name="cohere tx"): client.embed(texts=["hello"], model="text-embedding-3-large") (event,) = events diff --git a/tests/integrations/django/test_cache_module.py b/tests/integrations/django/test_cache_module.py index 03e4925ab0..2d8cc3d5d6 100644 --- a/tests/integrations/django/test_cache_module.py +++ b/tests/integrations/django/test_cache_module.py @@ -530,7 +530,7 @@ def test_cache_spans_get_many( from django.core.cache import cache - with sentry_sdk.start_transaction(name="caches"): + with sentry_sdk.start_span(name="caches"): cache.get_many([f"S{id}", f"S{id+1}"]) cache.set(f"S{id}", "Sensitive1") cache.get_many([f"S{id}", f"S{id+1}"]) @@ -574,7 +574,7 @@ def test_cache_spans_set_many( from django.core.cache import cache - with sentry_sdk.start_transaction(name="caches"): + with sentry_sdk.start_span(name="caches"): cache.set_many({f"S{id}": "Sensitive1", f"S{id+1}": "Sensitive2"}) cache.get(f"S{id}") diff --git a/tests/integrations/django/test_db_query_data.py b/tests/integrations/django/test_db_query_data.py index ccbe6ee28a..82f1f339a6 100644 --- a/tests/integrations/django/test_db_query_data.py +++ b/tests/integrations/django/test_db_query_data.py @@ -16,7 +16,7 @@ from freezegun import freeze_time from werkzeug.test import Client -from sentry_sdk import start_transaction, start_span +from sentry_sdk import start_span from sentry_sdk.consts import SPANDATA from sentry_sdk.integrations.django import DjangoIntegration from sentry_sdk.tracing_utils import record_sql_queries @@ -496,7 +496,7 @@ def test_db_span_origin_executemany(sentry_init, client, capture_events): if "postgres" not in connections: pytest.skip("postgres tests disabled") - with start_transaction(name="test_transaction"): + with start_span(name="test_transaction"): from django.db import connection, transaction cursor = connection.cursor() diff --git a/tests/integrations/huggingface_hub/test_huggingface_hub.py b/tests/integrations/huggingface_hub/test_huggingface_hub.py index f43159d80e..7e84d648ee 100644 --- a/tests/integrations/huggingface_hub/test_huggingface_hub.py +++ b/tests/integrations/huggingface_hub/test_huggingface_hub.py @@ -6,7 +6,7 @@ ) from huggingface_hub.errors import OverloadedError -from sentry_sdk import start_transaction +from sentry_sdk import start_span from sentry_sdk.integrations.huggingface_hub import HuggingfaceHubIntegration from unittest import mock # python 3.3 and above @@ -43,7 +43,7 @@ def test_nonstreaming_chat_completion( client.post = mock.Mock( return_value=b'[{"generated_text": "the model response"}]' ) - with start_transaction(name="huggingface_hub tx"): + with start_span(name="huggingface_hub tx"): response = client.text_generation( prompt="hello", details=details_arg, @@ -95,7 +95,7 @@ def test_streaming_chat_completion( }""", ] ) - with start_transaction(name="huggingface_hub tx"): + with start_span(name="huggingface_hub tx"): response = list( client.text_generation( prompt="hello", @@ -154,7 +154,7 @@ def test_span_origin(sentry_init, capture_events): }""", ] ) - with start_transaction(name="huggingface_hub tx"): + with start_span(name="huggingface_hub tx"): list( client.text_generation( prompt="hello", diff --git a/tests/integrations/langchain/test_langchain.py b/tests/integrations/langchain/test_langchain.py index b9e5705b88..2ac6679321 100644 --- a/tests/integrations/langchain/test_langchain.py +++ b/tests/integrations/langchain/test_langchain.py @@ -14,7 
+14,7 @@ from langchain_core.messages import BaseMessage, AIMessageChunk from langchain_core.outputs import ChatGenerationChunk -from sentry_sdk import start_transaction +from sentry_sdk import start_span from sentry_sdk.integrations.langchain import LangchainIntegration from langchain.agents import tool, AgentExecutor, create_openai_tools_agent from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder @@ -163,7 +163,7 @@ def test_langchain_agent( agent_executor = AgentExecutor(agent=agent, tools=[get_word_length], verbose=True) - with start_transaction(): + with start_span(name="agent"): list(agent_executor.stream({"input": "How many letters in the word eudca"})) tx = events[0] @@ -237,7 +237,7 @@ def test_langchain_error(sentry_init, capture_events): agent_executor = AgentExecutor(agent=agent, tools=[get_word_length], verbose=True) - with start_transaction(), pytest.raises(Exception): + with start_span(name="agent"), pytest.raises(Exception): list(agent_executor.stream({"input": "How many letters in the word eudca"})) error = events[0] @@ -332,7 +332,7 @@ def test_span_origin(sentry_init, capture_events): agent_executor = AgentExecutor(agent=agent, tools=[get_word_length], verbose=True) - with start_transaction(): + with start_span(name="agent"): list(agent_executor.stream({"input": "How many letters in the word eudca"})) (event,) = events diff --git a/tests/integrations/openai/test_openai.py b/tests/integrations/openai/test_openai.py index 1d5ce7a2b1..0508d7d056 100644 --- a/tests/integrations/openai/test_openai.py +++ b/tests/integrations/openai/test_openai.py @@ -6,7 +6,7 @@ from openai.types.chat.chat_completion_chunk import ChoiceDelta, Choice as DeltaChoice from openai.types.create_embedding_response import Usage as EmbeddingTokenUsage -from sentry_sdk import start_transaction +from sentry_sdk import start_span from sentry_sdk.integrations.openai import ( OpenAIIntegration, _calculate_chat_completion_usage, @@ -67,7 +67,7 @@ def test_nonstreaming_chat_completion( client = OpenAI(api_key="z") client.chat.completions._post = mock.Mock(return_value=EXAMPLE_CHAT_COMPLETION) - with start_transaction(name="openai tx"): + with start_span(name="openai tx"): response = ( client.chat.completions.create( model="some-model", messages=[{"role": "system", "content": "hello"}] @@ -112,7 +112,7 @@ async def test_nonstreaming_chat_completion_async( client = AsyncOpenAI(api_key="z") client.chat.completions._post = AsyncMock(return_value=EXAMPLE_CHAT_COMPLETION) - with start_transaction(name="openai tx"): + with start_span(name="openai tx"): response = await client.chat.completions.create( model="some-model", messages=[{"role": "system", "content": "hello"}] ) @@ -204,7 +204,7 @@ def test_streaming_chat_completion( ] client.chat.completions._post = mock.Mock(return_value=returned_stream) - with start_transaction(name="openai tx"): + with start_span(name="openai tx"): response_stream = client.chat.completions.create( model="some-model", messages=[{"role": "system", "content": "hello"}] ) @@ -298,7 +298,7 @@ async def test_streaming_chat_completion_async( ) client.chat.completions._post = AsyncMock(return_value=returned_stream) - with start_transaction(name="openai tx"): + with start_span(name="openai tx"): response_stream = await client.chat.completions.create( model="some-model", messages=[{"role": "system", "content": "hello"}] ) @@ -393,7 +393,7 @@ def test_embeddings_create( ) client.embeddings._post = mock.Mock(return_value=returned_embedding) - with 
start_transaction(name="openai tx"): + with start_span(name="openai tx"): response = client.embeddings.create( input="hello", model="text-embedding-3-large" ) @@ -441,7 +441,7 @@ async def test_embeddings_create_async( ) client.embeddings._post = AsyncMock(return_value=returned_embedding) - with start_transaction(name="openai tx"): + with start_span(name="openai tx"): response = await client.embeddings.create( input="hello", model="text-embedding-3-large" ) @@ -528,7 +528,7 @@ def test_span_origin_nonstreaming_chat(sentry_init, capture_events): client = OpenAI(api_key="z") client.chat.completions._post = mock.Mock(return_value=EXAMPLE_CHAT_COMPLETION) - with start_transaction(name="openai tx"): + with start_span(name="openai tx"): client.chat.completions.create( model="some-model", messages=[{"role": "system", "content": "hello"}] ) @@ -550,7 +550,7 @@ async def test_span_origin_nonstreaming_chat_async(sentry_init, capture_events): client = AsyncOpenAI(api_key="z") client.chat.completions._post = AsyncMock(return_value=EXAMPLE_CHAT_COMPLETION) - with start_transaction(name="openai tx"): + with start_span(name="openai tx"): await client.chat.completions.create( model="some-model", messages=[{"role": "system", "content": "hello"}] ) @@ -607,7 +607,7 @@ def test_span_origin_streaming_chat(sentry_init, capture_events): ] client.chat.completions._post = mock.Mock(return_value=returned_stream) - with start_transaction(name="openai tx"): + with start_span(name="openai tx"): response_stream = client.chat.completions.create( model="some-model", messages=[{"role": "system", "content": "hello"}] ) @@ -671,7 +671,7 @@ async def test_span_origin_streaming_chat_async(sentry_init, capture_events): ) client.chat.completions._post = AsyncMock(return_value=returned_stream) - with start_transaction(name="openai tx"): + with start_span(name="openai tx"): response_stream = await client.chat.completions.create( model="some-model", messages=[{"role": "system", "content": "hello"}] ) @@ -706,7 +706,7 @@ def test_span_origin_embeddings(sentry_init, capture_events): ) client.embeddings._post = mock.Mock(return_value=returned_embedding) - with start_transaction(name="openai tx"): + with start_span(name="openai tx"): client.embeddings.create(input="hello", model="text-embedding-3-large") (event,) = events @@ -736,7 +736,7 @@ async def test_span_origin_embeddings_async(sentry_init, capture_events): ) client.embeddings._post = AsyncMock(return_value=returned_embedding) - with start_transaction(name="openai tx"): + with start_span(name="openai tx"): await client.embeddings.create(input="hello", model="text-embedding-3-large") (event,) = events diff --git a/tests/integrations/redis/test_redis_cache_module.py b/tests/integrations/redis/test_redis_cache_module.py index e02b1ec31a..b1c012e5ee 100644 --- a/tests/integrations/redis/test_redis_cache_module.py +++ b/tests/integrations/redis/test_redis_cache_module.py @@ -24,7 +24,7 @@ def test_no_cache_basic(sentry_init, capture_events, render_span_tree): events = capture_events() connection = FakeStrictRedis() - with sentry_sdk.start_transaction(): + with sentry_sdk.start_span(name="cache"): connection.get("mycachekey") (event,) = events @@ -49,7 +49,7 @@ def test_cache_basic(sentry_init, capture_events, render_span_tree): events = capture_events() connection = FakeStrictRedis() - with sentry_sdk.start_transaction(): + with sentry_sdk.start_span(name="cache"): connection.hget("mycachekey", "myfield") connection.get("mycachekey") connection.set("mycachekey1", "bla") @@ -87,7 
+87,7 @@ def test_cache_keys(sentry_init, capture_events, render_span_tree): events = capture_events() connection = FakeStrictRedis() - with sentry_sdk.start_transaction(): + with sentry_sdk.start_span(name="cache"): connection.get("somethingelse") connection.get("blub") connection.get("blubkeything") @@ -120,7 +120,7 @@ def test_cache_data(sentry_init, capture_events): events = capture_events() connection = FakeStrictRedis(host="mycacheserver.io", port=6378) - with sentry_sdk.start_transaction(): + with sentry_sdk.start_span(name="cache"): connection.get("mycachekey") connection.set("mycachekey", "事实胜于雄辩") connection.get("mycachekey") @@ -203,7 +203,7 @@ def test_cache_prefixes(sentry_init, capture_events): events = capture_events() connection = FakeStrictRedis() - with sentry_sdk.start_transaction(): + with sentry_sdk.start_span(name="cache"): connection.mget("yes", "no") connection.mget("no", 1, "yes") connection.mget("no", "yes.1", "yes.2") diff --git a/tests/integrations/redis_py_cluster_legacy/test_redis_py_cluster_legacy.py b/tests/integrations/redis_py_cluster_legacy/test_redis_py_cluster_legacy.py index 5e0b724436..a530fec115 100644 --- a/tests/integrations/redis_py_cluster_legacy/test_redis_py_cluster_legacy.py +++ b/tests/integrations/redis_py_cluster_legacy/test_redis_py_cluster_legacy.py @@ -4,7 +4,7 @@ import rediscluster from sentry_sdk import capture_message -from sentry_sdk.api import start_transaction +from sentry_sdk.api import start_span from sentry_sdk.consts import SPANDATA from sentry_sdk.integrations.redis import RedisIntegration from tests.conftest import ApproxDict @@ -84,7 +84,7 @@ def test_rediscluster_pipeline( events = capture_events() rc = rediscluster.RedisCluster(connection_pool=MOCK_CONNECTION_POOL) - with start_transaction(): + with start_span(name="redis"): pipeline = rc.pipeline() pipeline.get("foo") pipeline.set("bar", 1) @@ -120,7 +120,7 @@ def test_db_connection_attributes_client(sentry_init, capture_events, redisclust events = capture_events() rc = rediscluster_cls(connection_pool=MOCK_CONNECTION_POOL) - with start_transaction(): + with start_span(name="redis"): rc.get("foobar") (event,) = events @@ -147,7 +147,7 @@ def test_db_connection_attributes_pipeline( events = capture_events() rc = rediscluster.RedisCluster(connection_pool=MOCK_CONNECTION_POOL) - with start_transaction(): + with start_span(name="redis"): pipeline = rc.pipeline() pipeline.get("foo") pipeline.execute() diff --git a/tests/integrations/socket/test_socket.py b/tests/integrations/socket/test_socket.py index e629114b2b..500e9b5608 100644 --- a/tests/integrations/socket/test_socket.py +++ b/tests/integrations/socket/test_socket.py @@ -1,6 +1,6 @@ import socket -from sentry_sdk import start_transaction +from sentry_sdk import start_span from sentry_sdk.integrations.socket import SocketIntegration from tests.conftest import ApproxDict @@ -9,7 +9,7 @@ def test_getaddrinfo_trace(sentry_init, capture_events): sentry_init(integrations=[SocketIntegration()], traces_sample_rate=1.0) events = capture_events() - with start_transaction(): + with start_span(name="socket"): socket.getaddrinfo("example.com", 443) (event,) = events @@ -31,7 +31,7 @@ def test_create_connection_trace(sentry_init, capture_events): sentry_init(integrations=[SocketIntegration()], traces_sample_rate=1.0) events = capture_events() - with start_transaction(): + with start_span(name="socket"): socket.create_connection(("example.com", 443), timeout, None) (event,) = events @@ -65,7 +65,7 @@ def test_span_origin(sentry_init, 
capture_events): ) events = capture_events() - with start_transaction(name="foo"): + with start_span(name="foo"): socket.create_connection(("example.com", 443), 1, None) (event,) = events diff --git a/tests/integrations/tornado/test_tornado.py b/tests/integrations/tornado/test_tornado.py index 837da07434..e5dae3fcd9 100644 --- a/tests/integrations/tornado/test_tornado.py +++ b/tests/integrations/tornado/test_tornado.py @@ -4,7 +4,7 @@ import pytest import sentry_sdk -from sentry_sdk import start_transaction, capture_message +from sentry_sdk import start_span, capture_message from sentry_sdk.integrations.tornado import TornadoIntegration from tornado.web import RequestHandler, Application, HTTPError @@ -117,7 +117,7 @@ def test_transactions(tornado_testcase, sentry_init, capture_events, handler, co events = capture_events() client = tornado_testcase(Application([(r"/hi", handler)])) - with start_transaction(name="client") as span: + with start_span(name="client") as span: pass response = client.fetch( @@ -135,7 +135,7 @@ def test_transactions(tornado_testcase, sentry_init, capture_events, handler, co assert client_tx["transaction"] == "client" assert client_tx["transaction_info"] == { "source": "custom" - } # because this is just the start_transaction() above. + } # because this is just the start_span() above. if server_error is not None: assert server_error["exception"]["values"][0]["type"] == "ZeroDivisionError" diff --git a/tests/profiler/test_continuous_profiler.py b/tests/profiler/test_continuous_profiler.py index 1b96f27036..f56afe656e 100644 --- a/tests/profiler/test_continuous_profiler.py +++ b/tests/profiler/test_continuous_profiler.py @@ -198,7 +198,7 @@ def test_continuous_profiler_auto_start_and_manual_stop( thread = threading.current_thread() - with sentry_sdk.start_transaction(name="profiling"): + with sentry_sdk.start_span(name="profiling"): with sentry_sdk.start_span(op="op"): time.sleep(0.05) @@ -209,7 +209,7 @@ def test_continuous_profiler_auto_start_and_manual_stop( envelopes.clear() - with sentry_sdk.start_transaction(name="profiling"): + with sentry_sdk.start_span(name="profiling"): with sentry_sdk.start_span(op="op"): time.sleep(0.05) @@ -219,7 +219,7 @@ def test_continuous_profiler_auto_start_and_manual_stop( envelopes.clear() - with sentry_sdk.start_transaction(name="profiling"): + with sentry_sdk.start_span(name="profiling"): with sentry_sdk.start_span(op="op"): time.sleep(0.05) @@ -260,7 +260,7 @@ def test_continuous_profiler_manual_start_and_stop( envelopes.clear() - with sentry_sdk.start_transaction(name="profiling"): + with sentry_sdk.start_span(name="profiling"): with sentry_sdk.start_span(op="op"): time.sleep(0.05) @@ -270,7 +270,7 @@ def test_continuous_profiler_manual_start_and_stop( envelopes.clear() - with sentry_sdk.start_transaction(name="profiling"): + with sentry_sdk.start_span(name="profiling"): with sentry_sdk.start_span(op="op"): time.sleep(0.05) diff --git a/tests/profiler/test_transaction_profiler.py b/tests/profiler/test_transaction_profiler.py index a77942e788..7679831be3 100644 --- a/tests/profiler/test_transaction_profiler.py +++ b/tests/profiler/test_transaction_profiler.py @@ -9,7 +9,7 @@ import pytest -from sentry_sdk import start_transaction +from sentry_sdk import start_span from sentry_sdk.profiler.transaction_profiler import ( GeventScheduler, Profile, @@ -148,7 +148,7 @@ def test_profiles_sample_rate( with mock.patch( "sentry_sdk.profiler.transaction_profiler.random.random", return_value=0.5 ): - with 
start_transaction(name="profiling"): + with start_span(name="profiling"): pass items = defaultdict(list) @@ -219,7 +219,7 @@ def test_profiles_sampler( with mock.patch( "sentry_sdk.profiler.transaction_profiler.random.random", return_value=0.5 ): - with start_transaction(name="profiling"): + with start_span(name="profiling"): pass items = defaultdict(list) @@ -249,7 +249,7 @@ def test_minimum_unique_samples_required( envelopes = capture_envelopes() record_lost_event_calls = capture_record_lost_event_calls() - with start_transaction(name="profiling"): + with start_span(name="profiling"): pass items = defaultdict(list) @@ -277,7 +277,7 @@ def test_profile_captured( envelopes = capture_envelopes() - with start_transaction(name="profiling"): + with start_span(name="profiling"): time.sleep(0.05) items = defaultdict(list) diff --git a/tests/test_ai_monitoring.py b/tests/test_ai_monitoring.py index 5e7c7432fa..9ecd75fc84 100644 --- a/tests/test_ai_monitoring.py +++ b/tests/test_ai_monitoring.py @@ -16,7 +16,7 @@ def tool(**kwargs): def pipeline(): tool() - with sentry_sdk.start_transaction(): + with sentry_sdk.start_span(name="pipeline"): pipeline() transaction = events[0] @@ -43,7 +43,7 @@ def tool(**kwargs): def pipeline(): tool() - with sentry_sdk.start_transaction(): + with sentry_sdk.start_span(name="pipeline"): pipeline(sentry_tags={"user": "colin"}, sentry_data={"some_data": "value"}) transaction = events[0] @@ -74,7 +74,7 @@ async def async_tool(**kwargs): async def async_pipeline(): await async_tool() - with sentry_sdk.start_transaction(): + with sentry_sdk.start_span(name="async_pipeline"): await async_pipeline() transaction = events[0] @@ -102,7 +102,7 @@ async def async_tool(**kwargs): async def async_pipeline(): await async_tool() - with sentry_sdk.start_transaction(): + with sentry_sdk.start_span(name="async_pipeline"): await async_pipeline( sentry_tags={"user": "czyber"}, sentry_data={"some_data": "value"} ) diff --git a/tests/test_basics.py b/tests/test_basics.py index 3c05f9848a..cbf0177403 100644 --- a/tests/test_basics.py +++ b/tests/test_basics.py @@ -16,7 +16,7 @@ capture_event, capture_exception, capture_message, - start_transaction, + start_span, last_event_id, add_breadcrumb, isolation_scope, @@ -174,7 +174,7 @@ def before_send_transaction(event, hint): traces_sample_rate=1.0, ) events = capture_events() - transaction = start_transaction(name="foo") + transaction = start_span(name="foo") transaction.finish() (event,) = events @@ -191,7 +191,7 @@ def before_send_transaction_discard(event, hint): traces_sample_rate=1.0, ) events = capture_events() - transaction = start_transaction(name="foo") + transaction = start_span(name="foo") transaction.finish() assert len(events) == 0 @@ -592,7 +592,7 @@ def foo(event, hint): capture_message("dropped") - with start_transaction(name="dropped"): + with start_span(name="dropped"): pass assert len(events) == 0 @@ -697,7 +697,7 @@ def test_functions_to_trace(sentry_init, capture_events): events = capture_events() - with start_transaction(name="something"): + with start_span(name="something"): time.sleep(0) for word in ["World", "You"]: @@ -733,7 +733,7 @@ def test_functions_to_trace_with_class(sentry_init, capture_events): events = capture_events() - with start_transaction(name="something"): + with start_span(name="something"): wg = WorldGreeter("World") wg.greet() wg.greet("You") @@ -822,7 +822,7 @@ def test_last_event_id_transaction(sentry_init): assert last_event_id() is None - with start_transaction(name="test"): + with 
start_span(name="test"): pass assert last_event_id() is None, "Transaction should not set last_event_id" diff --git a/tests/test_scope.py b/tests/test_scope.py index e04fcb2e05..1ae1a2fd35 100644 --- a/tests/test_scope.py +++ b/tests/test_scope.py @@ -807,7 +807,7 @@ def test_nested_scopes_with_tags(sentry_init, capture_envelopes): with sentry_sdk.new_scope() as scope2: scope2.set_tag("current_scope2", 1) - with sentry_sdk.start_transaction(name="trx") as trx: + with sentry_sdk.start_span(name="trx") as trx: trx.set_tag("trx", 1) with sentry_sdk.start_span(op="span1") as span1: @@ -898,7 +898,7 @@ def test_last_event_id_transaction(sentry_init): assert Scope.last_event_id() is None - with sentry_sdk.start_transaction(name="test"): + with sentry_sdk.start_span(name="test"): pass assert Scope.last_event_id() is None, "Transaction should not set last_event_id" diff --git a/tests/test_scrubber.py b/tests/test_scrubber.py index 2c462153dd..8bef3bac10 100644 --- a/tests/test_scrubber.py +++ b/tests/test_scrubber.py @@ -1,7 +1,7 @@ import sys import logging -from sentry_sdk import capture_exception, capture_event, start_transaction, start_span +from sentry_sdk import capture_exception, capture_event, start_span from sentry_sdk.utils import event_from_exception from sentry_sdk.scrubber import EventScrubber from tests.conftest import ApproxDict @@ -145,7 +145,7 @@ def test_span_data_scrubbing(sentry_init, capture_events): sentry_init(traces_sample_rate=1.0) events = capture_events() - with start_transaction(name="hi"): + with start_span(name="hi"): with start_span(op="foo", name="bar") as span: span.set_data("password", "secret") span.set_data("datafoo", "databar") From 525d1564c3aabc7ebd5f43b553083f8f2df423f7 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Fri, 10 Jan 2025 13:08:01 +0100 Subject: [PATCH 159/244] Pin ubuntu to 22.04 (#3915) python 3.7 doesnt work on ubuntu 24.04 --- .github/workflows/test-integrations-ai.yml | 4 ++-- .github/workflows/test-integrations-aws.yml | 2 +- .github/workflows/test-integrations-cloud.yml | 4 ++-- .github/workflows/test-integrations-common.yml | 2 +- .github/workflows/test-integrations-dbs.yml | 4 ++-- .github/workflows/test-integrations-graphql.yml | 4 ++-- .github/workflows/test-integrations-misc.yml | 12 ++++++++++-- .github/workflows/test-integrations-network.yml | 4 ++-- .github/workflows/test-integrations-tasks.yml | 4 ++-- .github/workflows/test-integrations-web-1.yml | 4 ++-- .github/workflows/test-integrations-web-2.yml | 4 ++-- .../split_tox_gh_actions/templates/test_group.jinja | 2 +- 12 files changed, 29 insertions(+), 21 deletions(-) diff --git a/.github/workflows/test-integrations-ai.yml b/.github/workflows/test-integrations-ai.yml index 77cac55430..85c5475aff 100644 --- a/.github/workflows/test-integrations-ai.yml +++ b/.github/workflows/test-integrations-ai.yml @@ -30,7 +30,7 @@ jobs: fail-fast: false matrix: python-version: ["3.7","3.9","3.11","3.12","3.13"] - os: [ubuntu-latest] + os: [ubuntu-22.04] steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 @@ -92,7 +92,7 @@ jobs: fail-fast: false matrix: python-version: ["3.8","3.9","3.11","3.12","3.13"] - os: [ubuntu-latest] + os: [ubuntu-22.04] steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 diff --git a/.github/workflows/test-integrations-aws.yml b/.github/workflows/test-integrations-aws.yml index 698902fec0..33de7bac46 100644 --- a/.github/workflows/test-integrations-aws.yml +++ b/.github/workflows/test-integrations-aws.yml @@ -62,7 +62,7 @@ jobs: 
fail-fast: false matrix: python-version: ["3.9"] - os: [ubuntu-latest] + os: [ubuntu-22.04] needs: check-permissions steps: - uses: actions/checkout@v4.2.2 diff --git a/.github/workflows/test-integrations-cloud.yml b/.github/workflows/test-integrations-cloud.yml index a73adae869..8c450dc911 100644 --- a/.github/workflows/test-integrations-cloud.yml +++ b/.github/workflows/test-integrations-cloud.yml @@ -30,7 +30,7 @@ jobs: fail-fast: false matrix: python-version: ["3.8","3.11","3.12","3.13"] - os: [ubuntu-latest] + os: [ubuntu-22.04] steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 @@ -88,7 +88,7 @@ jobs: fail-fast: false matrix: python-version: ["3.7","3.9","3.11","3.12","3.13"] - os: [ubuntu-latest] + os: [ubuntu-22.04] steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 diff --git a/.github/workflows/test-integrations-common.yml b/.github/workflows/test-integrations-common.yml index 237b94bc26..24c0e84026 100644 --- a/.github/workflows/test-integrations-common.yml +++ b/.github/workflows/test-integrations-common.yml @@ -30,7 +30,7 @@ jobs: fail-fast: false matrix: python-version: ["3.7","3.8","3.9","3.10","3.11","3.12","3.13"] - os: [ubuntu-latest] + os: [ubuntu-22.04] steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 diff --git a/.github/workflows/test-integrations-dbs.yml b/.github/workflows/test-integrations-dbs.yml index 6bf6e193bf..12359c93b4 100644 --- a/.github/workflows/test-integrations-dbs.yml +++ b/.github/workflows/test-integrations-dbs.yml @@ -30,7 +30,7 @@ jobs: fail-fast: false matrix: python-version: ["3.7","3.8","3.11","3.12","3.13"] - os: [ubuntu-latest] + os: [ubuntu-22.04] services: postgres: image: postgres @@ -115,7 +115,7 @@ jobs: fail-fast: false matrix: python-version: ["3.7","3.8","3.9","3.10","3.11","3.12"] - os: [ubuntu-latest] + os: [ubuntu-22.04] services: postgres: image: postgres diff --git a/.github/workflows/test-integrations-graphql.yml b/.github/workflows/test-integrations-graphql.yml index baade93bf6..7b6571619a 100644 --- a/.github/workflows/test-integrations-graphql.yml +++ b/.github/workflows/test-integrations-graphql.yml @@ -30,7 +30,7 @@ jobs: fail-fast: false matrix: python-version: ["3.7","3.8","3.12","3.13"] - os: [ubuntu-latest] + os: [ubuntu-22.04] steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 @@ -88,7 +88,7 @@ jobs: fail-fast: false matrix: python-version: ["3.7","3.8","3.11","3.12"] - os: [ubuntu-latest] + os: [ubuntu-22.04] steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 diff --git a/.github/workflows/test-integrations-misc.yml b/.github/workflows/test-integrations-misc.yml index 581b4cb8d0..cfd5dae663 100644 --- a/.github/workflows/test-integrations-misc.yml +++ b/.github/workflows/test-integrations-misc.yml @@ -30,7 +30,7 @@ jobs: fail-fast: false matrix: python-version: ["3.7","3.8","3.12","3.13"] - os: [ubuntu-latest] + os: [ubuntu-22.04] steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 @@ -75,6 +75,10 @@ jobs: run: | set -x # print commands that are executed ./scripts/runtox.sh "py${{ matrix.python-version }}-typer-latest" + - name: Test unleash latest + run: | + set -x # print commands that are executed + ./scripts/runtox.sh "py${{ matrix.python-version }}-unleash-latest" - name: Generate coverage XML if: ${{ !cancelled() }} run: | @@ -104,7 +108,7 @@ jobs: fail-fast: false matrix: python-version: ["3.7","3.8","3.9","3.10","3.11","3.12","3.13"] - os: [ubuntu-latest] + os: [ubuntu-22.04] steps: - uses: 
actions/checkout@v4.2.2 - uses: actions/setup-python@v5 @@ -149,6 +153,10 @@ jobs: run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-typer" + - name: Test unleash pinned + run: | + set -x # print commands that are executed + ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-unleash" - name: Generate coverage XML if: ${{ !cancelled() }} run: | diff --git a/.github/workflows/test-integrations-network.yml b/.github/workflows/test-integrations-network.yml index c8c90ffca4..de02b30d8c 100644 --- a/.github/workflows/test-integrations-network.yml +++ b/.github/workflows/test-integrations-network.yml @@ -30,7 +30,7 @@ jobs: fail-fast: false matrix: python-version: ["3.8","3.9","3.11","3.12","3.13"] - os: [ubuntu-latest] + os: [ubuntu-22.04] steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 @@ -88,7 +88,7 @@ jobs: fail-fast: false matrix: python-version: ["3.7","3.8","3.9","3.10","3.11","3.12","3.13"] - os: [ubuntu-latest] + os: [ubuntu-22.04] steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 diff --git a/.github/workflows/test-integrations-tasks.yml b/.github/workflows/test-integrations-tasks.yml index a6796534ef..1b232c4d64 100644 --- a/.github/workflows/test-integrations-tasks.yml +++ b/.github/workflows/test-integrations-tasks.yml @@ -30,7 +30,7 @@ jobs: fail-fast: false matrix: python-version: ["3.7","3.8","3.10","3.11","3.12","3.13"] - os: [ubuntu-latest] + os: [ubuntu-22.04] steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 @@ -106,7 +106,7 @@ jobs: fail-fast: false matrix: python-version: ["3.7","3.8","3.9","3.10","3.11","3.12"] - os: [ubuntu-latest] + os: [ubuntu-22.04] steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 diff --git a/.github/workflows/test-integrations-web-1.yml b/.github/workflows/test-integrations-web-1.yml index 076fbd68cf..1b3163c7ba 100644 --- a/.github/workflows/test-integrations-web-1.yml +++ b/.github/workflows/test-integrations-web-1.yml @@ -30,7 +30,7 @@ jobs: fail-fast: false matrix: python-version: ["3.8","3.10","3.12","3.13"] - os: [ubuntu-latest] + os: [ubuntu-22.04] services: postgres: image: postgres @@ -106,7 +106,7 @@ jobs: fail-fast: false matrix: python-version: ["3.7","3.8","3.9","3.10","3.11","3.12"] - os: [ubuntu-latest] + os: [ubuntu-22.04] services: postgres: image: postgres diff --git a/.github/workflows/test-integrations-web-2.yml b/.github/workflows/test-integrations-web-2.yml index 5508421ed2..fdd6c5ede3 100644 --- a/.github/workflows/test-integrations-web-2.yml +++ b/.github/workflows/test-integrations-web-2.yml @@ -30,7 +30,7 @@ jobs: fail-fast: false matrix: python-version: ["3.7","3.8","3.9","3.11","3.12","3.13"] - os: [ubuntu-latest] + os: [ubuntu-22.04] steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 @@ -112,7 +112,7 @@ jobs: fail-fast: false matrix: python-version: ["3.7","3.8","3.9","3.11","3.12","3.13"] - os: [ubuntu-latest] + os: [ubuntu-22.04] steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 diff --git a/scripts/split_tox_gh_actions/templates/test_group.jinja b/scripts/split_tox_gh_actions/templates/test_group.jinja index 9631df8846..0626fe03ca 100644 --- a/scripts/split_tox_gh_actions/templates/test_group.jinja +++ b/scripts/split_tox_gh_actions/templates/test_group.jinja @@ -6,7 +6,7 @@ fail-fast: false matrix: python-version: [{{ py_versions.get(category)|join(",") }}] - os: [ubuntu-latest] + os: [ubuntu-22.04] {% if 
needs_github_secrets %} needs: check-permissions From 869dfea078bbccacddfab06988b4f7bc46708b0b Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Fri, 10 Jan 2025 14:38:44 +0100 Subject: [PATCH 160/244] Fix redis cache tests after span name add --- tests/integrations/redis/test_redis_cache_module.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/integrations/redis/test_redis_cache_module.py b/tests/integrations/redis/test_redis_cache_module.py index b1c012e5ee..75f58d346d 100644 --- a/tests/integrations/redis/test_redis_cache_module.py +++ b/tests/integrations/redis/test_redis_cache_module.py @@ -31,7 +31,7 @@ def test_no_cache_basic(sentry_init, capture_events, render_span_tree): assert ( render_span_tree(event) == """\ -- op="": description=null +- op="cache": description=null - op="db.redis": description="GET 'mycachekey'"\ """ ) @@ -61,7 +61,7 @@ def test_cache_basic(sentry_init, capture_events, render_span_tree): assert ( render_span_tree(event) == """\ -- op="": description=null +- op="cache": description=null - op="db.redis": description="HGET 'mycachekey' [Filtered]" - op="cache.get": description="mycachekey" - op="db.redis": description="GET 'mycachekey'" @@ -97,7 +97,7 @@ def test_cache_keys(sentry_init, capture_events, render_span_tree): assert ( render_span_tree(event) == """\ -- op="": description=null +- op="cache": description=null - op="db.redis": description="GET 'somethingelse'" - op="cache.get": description="blub" - op="db.redis": description="GET 'blub'" From ab5d8a797e1b6ca217f022f3e7aec1cdb69f3805 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Fri, 10 Jan 2025 14:39:24 +0100 Subject: [PATCH 161/244] Setup scopes before client (#3916) The newly added feature flags add an error processor to `current_scope` in their `setup_once`. This is actually an antipattern and shouldn't be encouraged. Either way, this PR sets up the scopes first since the integrations get setup in the `Client` and require that scope to be correct. --- sentry_sdk/_init_implementation.py | 2 +- tests/conftest.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/sentry_sdk/_init_implementation.py b/sentry_sdk/_init_implementation.py index 74bbd9a20f..4ad110ab56 100644 --- a/sentry_sdk/_init_implementation.py +++ b/sentry_sdk/_init_implementation.py @@ -23,9 +23,9 @@ def _init(*args, **kwargs): This takes the same arguments as the client constructor. 
""" + setup_scope_context_management() client = sentry_sdk.Client(*args, **kwargs) sentry_sdk.get_global_scope().set_client(client) - setup_scope_context_management() _check_python_deprecations() diff --git a/tests/conftest.py b/tests/conftest.py index 70f86b6905..d067e04eb5 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -199,10 +199,10 @@ def inner(identifier): @pytest.fixture def sentry_init(request): def inner(*a, **kw): + setup_scope_context_management() kw.setdefault("transport", TestTransport()) client = sentry_sdk.Client(*a, **kw) sentry_sdk.get_global_scope().set_client(client) - setup_scope_context_management() if request.node.get_closest_marker("forked"): # Do not run isolation if the test is already running in From 7cf7373a9bb0410853a727869074fac5daccb7fa Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Mon, 13 Jan 2025 17:09:00 +0100 Subject: [PATCH 162/244] Fix langchain integration (#3921) * Add optional `parent_span` argument to `POTelSpan` constructor and fix `start_child` * `run_id` is reused for the top level pipeline, so make sure to close that span or else we get orphans * Don't use context manager enter/exit since we're doing manual span management * Set correct statuses while finishing the spans --- sentry_sdk/ai/monitoring.py | 2 +- sentry_sdk/ai/utils.py | 2 +- sentry_sdk/integrations/langchain.py | 39 +++++++++++-------- .../opentelemetry/span_processor.py | 14 +++++++ sentry_sdk/tracing.py | 19 ++++++--- .../integrations/langchain/test_langchain.py | 10 +---- 6 files changed, 53 insertions(+), 33 deletions(-) diff --git a/sentry_sdk/ai/monitoring.py b/sentry_sdk/ai/monitoring.py index e149ebe7df..e826f3bf90 100644 --- a/sentry_sdk/ai/monitoring.py +++ b/sentry_sdk/ai/monitoring.py @@ -3,7 +3,7 @@ import sentry_sdk.utils from sentry_sdk import start_span -from sentry_sdk.tracing import Span +from sentry_sdk.tracing import POTelSpan as Span from sentry_sdk.utils import ContextVar from typing import TYPE_CHECKING diff --git a/sentry_sdk/ai/utils.py b/sentry_sdk/ai/utils.py index ed3494f679..4a972071a9 100644 --- a/sentry_sdk/ai/utils.py +++ b/sentry_sdk/ai/utils.py @@ -3,7 +3,7 @@ if TYPE_CHECKING: from typing import Any -from sentry_sdk.tracing import Span +from sentry_sdk.tracing import POTelSpan as Span from sentry_sdk.utils import logger diff --git a/sentry_sdk/integrations/langchain.py b/sentry_sdk/integrations/langchain.py index afce913d8e..deb700bde2 100644 --- a/sentry_sdk/integrations/langchain.py +++ b/sentry_sdk/integrations/langchain.py @@ -3,10 +3,10 @@ import sentry_sdk from sentry_sdk.ai.monitoring import set_ai_pipeline_name, record_token_usage -from sentry_sdk.consts import OP, SPANDATA +from sentry_sdk.consts import OP, SPANDATA, SPANSTATUS from sentry_sdk.ai.utils import set_data_normalized from sentry_sdk.scope import should_send_default_pii -from sentry_sdk.tracing import Span +from sentry_sdk.tracing import POTelSpan as Span from sentry_sdk.integrations import DidNotEnable, Integration from sentry_sdk.utils import logger, capture_internal_exceptions @@ -72,7 +72,6 @@ def setup_once(): class WatchedSpan: - span = None # type: Span num_completion_tokens = 0 # type: int num_prompt_tokens = 0 # type: int no_collect_tokens = False # type: bool @@ -123,8 +122,9 @@ def _handle_error(self, run_id, error): span_data = self.span_map[run_id] if not span_data: return - sentry_sdk.capture_exception(error, span_data.span.scope) - span_data.span.__exit__(None, None, None) + sentry_sdk.capture_exception(error) + 
span_data.span.set_status(SPANSTATUS.INTERNAL_ERROR) + span_data.span.finish() del self.span_map[run_id] def _normalize_langchain_message(self, message): @@ -136,23 +136,27 @@ def _normalize_langchain_message(self, message): def _create_span(self, run_id, parent_id, **kwargs): # type: (SentryLangchainCallback, UUID, Optional[Any], Any) -> WatchedSpan - watched_span = None # type: Optional[WatchedSpan] - if parent_id: - parent_span = self.span_map.get(parent_id) # type: Optional[WatchedSpan] - if parent_span: - watched_span = WatchedSpan(parent_span.span.start_child(**kwargs)) - parent_span.children.append(watched_span) - if watched_span is None: - watched_span = WatchedSpan( - sentry_sdk.start_span(only_if_parent=True, **kwargs) - ) + parent_watched_span = self.span_map.get(parent_id) if parent_id else None + sentry_span = sentry_sdk.start_span( + parent_span=parent_watched_span.span if parent_watched_span else None, + only_if_parent=True, + **kwargs, + ) + watched_span = WatchedSpan(sentry_span) + if parent_watched_span: + parent_watched_span.children.append(watched_span) if kwargs.get("op", "").startswith("ai.pipeline."): if kwargs.get("name"): set_ai_pipeline_name(kwargs.get("name")) watched_span.is_pipeline = True - watched_span.span.__enter__() + # the same run_id is reused for the pipeline it seems + # so we need to end the older span to avoid orphan spans + existing_span_data = self.span_map.get(run_id) + if existing_span_data is not None: + self._exit_span(existing_span_data, run_id) + self.span_map[run_id] = watched_span self.gc_span_map() return watched_span @@ -163,7 +167,8 @@ def _exit_span(self, span_data, run_id): if span_data.is_pipeline: set_ai_pipeline_name(None) - span_data.span.__exit__(None, None, None) + span_data.span.set_status(SPANSTATUS.OK) + span_data.span.finish() del self.span_map[run_id] def on_llm_start( diff --git a/sentry_sdk/integrations/opentelemetry/span_processor.py b/sentry_sdk/integrations/opentelemetry/span_processor.py index 42ad32a5ea..8d513ec97d 100644 --- a/sentry_sdk/integrations/opentelemetry/span_processor.py +++ b/sentry_sdk/integrations/opentelemetry/span_processor.py @@ -291,3 +291,17 @@ def _common_span_transaction_attributes_as_json(self, span): common_json["tags"] = tags return common_json + + def _log_debug_info(self): + # type: () -> None + import pprint + + pprint.pprint( + { + format_span_id(span_id): [ + (format_span_id(child.context.span_id), child.name) + for child in children + ] + for span_id, children in self._children_spans.items() + } + ) diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index a0b9439dc8..3ee155aedb 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -1213,6 +1213,7 @@ def __init__( source=TRANSACTION_SOURCE_CUSTOM, # type: str attributes=None, # type: OTelSpanAttributes only_if_parent=False, # type: bool + parent_span=None, # type: Optional[POTelSpan] otel_span=None, # type: Optional[OtelSpan] **_, # type: dict[str, object] ): @@ -1231,7 +1232,7 @@ def __init__( self._otel_span = otel_span else: skip_span = False - if only_if_parent: + if only_if_parent and parent_span is None: parent_span_context = get_current_span().get_span_context() skip_span = ( not parent_span_context.is_valid or parent_span_context.is_remote @@ -1262,8 +1263,17 @@ def __init__( if sampled is not None: attributes[SentrySpanAttribute.CUSTOM_SAMPLED] = sampled + parent_context = None + if parent_span is not None: + parent_context = otel_trace.set_span_in_context( + parent_span._otel_span + ) + self._otel_span = 
tracer.start_span( - span_name, start_time=start_timestamp, attributes=attributes + span_name, + context=parent_context, + start_time=start_timestamp, + attributes=attributes, ) self.origin = origin or DEFAULT_SPAN_ORIGIN @@ -1506,10 +1516,7 @@ def timestamp(self): def start_child(self, **kwargs): # type: (**Any) -> POTelSpan - kwargs.setdefault("sampled", self.sampled) - - span = POTelSpan(only_if_parent=True, **kwargs) - return span + return POTelSpan(sampled=self.sampled, parent_span=self, **kwargs) def iter_headers(self): # type: () -> Iterator[Tuple[str, str]] diff --git a/tests/integrations/langchain/test_langchain.py b/tests/integrations/langchain/test_langchain.py index 2ac6679321..f8ab30054d 100644 --- a/tests/integrations/langchain/test_langchain.py +++ b/tests/integrations/langchain/test_langchain.py @@ -187,17 +187,11 @@ def test_langchain_agent( assert "measurements" not in chat_spans[0] if send_default_pii and include_prompts: - assert ( - "You are very powerful" - in chat_spans[0]["data"]["ai.input_messages"][0]["content"] - ) + assert "You are very powerful" in chat_spans[0]["data"]["ai.input_messages"] assert "5" in chat_spans[0]["data"]["ai.responses"] assert "word" in tool_exec_span["data"]["ai.input_messages"] assert 5 == int(tool_exec_span["data"]["ai.responses"]) - assert ( - "You are very powerful" - in chat_spans[1]["data"]["ai.input_messages"][0]["content"] - ) + assert "You are very powerful" in chat_spans[1]["data"]["ai.input_messages"] assert "5" in chat_spans[1]["data"]["ai.responses"] else: assert "ai.input_messages" not in chat_spans[0].get("data", {}) From 5c279bd66df52b96ab0e0686d5412ded58605430 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Tue, 14 Jan 2025 09:15:48 +0100 Subject: [PATCH 163/244] Fix AWS tests in POTel (#3879) Make AWS Lambda tests in POTel. --- sentry_sdk/integrations/aws_lambda.py | 17 +++++++++--- tests/integrations/aws_lambda/test_aws.py | 33 ++++++++++++++++------- 2 files changed, 37 insertions(+), 13 deletions(-) diff --git a/sentry_sdk/integrations/aws_lambda.py b/sentry_sdk/integrations/aws_lambda.py index 8899cc53b2..648859a233 100644 --- a/sentry_sdk/integrations/aws_lambda.py +++ b/sentry_sdk/integrations/aws_lambda.py @@ -5,6 +5,7 @@ from copy import deepcopy from datetime import datetime, timedelta, timezone from os import environ +from urllib.parse import urlencode import sentry_sdk from sentry_sdk.consts import OP @@ -120,6 +121,9 @@ def sentry_handler(aws_event, aws_context, *args, **kwargs): configured_time = aws_context.get_remaining_time_in_millis() with sentry_sdk.isolation_scope() as scope: + scope.set_transaction_name( + aws_context.function_name, source=TRANSACTION_SOURCE_COMPONENT + ) timeout_thread = None with capture_internal_exceptions(): scope.clear_breadcrumbs() @@ -333,7 +337,7 @@ def event_processor(sentry_event, hint, start_time=start_time): request["url"] = _get_url(aws_event, aws_context) if "queryStringParameters" in aws_event: - request["query_string"] = aws_event["queryStringParameters"] + request["query_string"] = urlencode(aws_event["queryStringParameters"]) if "headers" in aws_event: request["headers"] = _filter_headers(aws_event["headers"]) @@ -373,7 +377,9 @@ def _get_url(aws_event, aws_context): path = aws_event.get("path", None) headers = aws_event.get("headers") - if headers is None: + # Some AWS Services (ie. 
EventBridge) set headers as a list + # or None, so we must ensure it is a dict + if not isinstance(headers, dict): headers = {} host = headers.get("Host", None) @@ -478,7 +484,10 @@ def _prepopulate_attributes(aws_event, aws_context): for prop, attr in EVENT_TO_ATTRIBUTES.items(): if aws_event.get(prop) is not None: - attributes[attr] = aws_event[prop] + if prop == "queryStringParameters": + attributes[attr] = urlencode(aws_event[prop]) + else: + attributes[attr] = aws_event[prop] for prop, attr in CONTEXT_TO_ATTRIBUTES.items(): if getattr(aws_context, prop, None) is not None: @@ -487,7 +496,7 @@ def _prepopulate_attributes(aws_event, aws_context): url = _get_url(aws_event, aws_context) if url: if aws_event.get("queryStringParameters"): - url += f"?{aws_event['queryStringParameters']}" + url += f"?{urlencode(aws_event['queryStringParameters'])}" attributes["url.full"] = url headers = {} diff --git a/tests/integrations/aws_lambda/test_aws.py b/tests/integrations/aws_lambda/test_aws.py index e58fab292d..822a7a9146 100644 --- a/tests/integrations/aws_lambda/test_aws.py +++ b/tests/integrations/aws_lambda/test_aws.py @@ -67,6 +67,8 @@ def truncate_data(data): if data["contexts"].get("trace") is not None: cleaned_data["contexts"]["trace"] = data["contexts"].get("trace") + if cleaned_data["contexts"]["trace"].get("data", {}) != {}: + cleaned_data["contexts"]["trace"]["data"] = {"removed": "by truncate_data()"} if data.get("transaction") is not None: cleaned_data["transaction"] = data.get("transaction") @@ -287,7 +289,8 @@ def test_handler(event, context): "X-Forwarded-Proto": "https" }, "queryStringParameters": { - "bonkers": "true" + "bonkers": "true", + "wild": "false" }, "pathParameters": null, "stageVariables": null, @@ -312,7 +315,7 @@ def test_handler(event, context): "X-Forwarded-Proto": "https", }, "method": "GET", - "query_string": {"bonkers": "true"}, + "query_string": "bonkers=true&wild=false", "url": "https://iwsz2c7uwi.execute-api.us-east-1.amazonaws.com/asd", } @@ -487,6 +490,15 @@ def test_handler(event, context): ), (b"[]", False, 1), ], + ids=[ + "int", + "float", + "string", + "bool", + "list", + "list_with_request_data", + "empty_list", + ], ) def test_non_dict_event( run_lambda_function, @@ -539,9 +551,7 @@ def test_handler(event, context): "headers": {"Host": "x1.io", "X-Forwarded-Proto": "https"}, "method": "GET", "url": "https://x1.io/1", - "query_string": { - "done": "f", - }, + "query_string": "done=f", } else: request_data = {"url": "awslambda:///{}".format(function_name)} @@ -590,7 +600,7 @@ def test_traces_sampler_gets_correct_values_in_sampling_context( import inspect - _, response = run_lambda_function( + function_code = ( LAMBDA_PRELUDE + dedent(inspect.getsource(StringContaining)) + dedent(inspect.getsource(DictionaryContaining)) @@ -621,7 +631,7 @@ def test_handler(event, context): { "http.request.method": "GET", "url.path": "/sit/stay/rollover", - "url.query": "repeat=again", + "url.query": "repeat=twice", "url.full": "http://x.io/sit/stay/rollover?repeat=twice", "network.protocol.name": "http", "server.address": "x.io", @@ -643,10 +653,15 @@ def test_handler(event, context): traces_sampler=traces_sampler, ) """ - ), - b'{"httpMethod": "GET", "path": "/sit/stay/rollover", "query_string": {"repeat": "again"}, "headers": {"Host": "x.io", "X-Forwarded-Proto": "http", "Custom-Header": "Custom Value"}}', + ) ) + payload = b'{"httpMethod": "GET", "path": "/sit/stay/rollover", "queryStringParameters": {"repeat": "twice"}, "headers": {"Host": "x.io", 
"X-Forwarded-Proto": "http", "Custom-Header": "Custom Value"}}' + + _, response = run_lambda_function( + code=function_code, + payload=payload, + ) assert response["Payload"]["AssertionError raised"] is False From d1059dd8fe019b6be1767b6418bddc9a551f388c Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Wed, 15 Jan 2025 17:02:59 +0100 Subject: [PATCH 164/244] Fix Celery tests in POTel (#3772) Co-authored-by: Neel Shah --- sentry_sdk/integrations/celery/__init__.py | 16 +++-- tests/integrations/celery/test_celery.py | 82 +++++++++++++--------- 2 files changed, 61 insertions(+), 37 deletions(-) diff --git a/sentry_sdk/integrations/celery/__init__.py b/sentry_sdk/integrations/celery/__init__.py index fba87366af..6b74af1cb7 100644 --- a/sentry_sdk/integrations/celery/__init__.py +++ b/sentry_sdk/integrations/celery/__init__.py @@ -112,7 +112,6 @@ def _capture_exception(task, exc_info): return if isinstance(exc_info[1], CELERY_CONTROL_FLOW_EXCEPTIONS): - # ??? Doesn't map to anything _set_status("aborted") return @@ -276,6 +275,7 @@ def apply_async(*args, **kwargs): op=OP.QUEUE_SUBMIT_CELERY, name=task_name, origin=CeleryIntegration.origin, + only_if_parent=True, ) if not task_started_from_beat else NoOpMgr() @@ -306,11 +306,13 @@ def _inner(*args, **kwargs): with isolation_scope() as scope: scope._name = "celery" scope.clear_breadcrumbs() + scope.set_transaction_name(task.name, source=TRANSACTION_SOURCE_TASK) scope.add_event_processor(_make_event_processor(task, *args, **kwargs)) # Celery task objects are not a thing to be trusted. Even # something such as attribute access can fail. headers = args[3].get("headers") or {} + with sentry_sdk.continue_trace(headers): with sentry_sdk.start_span( op=OP.QUEUE_TASK_CELERY, @@ -320,9 +322,13 @@ def _inner(*args, **kwargs): # for some reason, args[1] is a list if non-empty but a # tuple if empty attributes=_prepopulate_attributes(task, list(args[1]), args[2]), - ) as transaction: - transaction.set_status(SPANSTATUS.OK) - return f(*args, **kwargs) + ) as root_span: + return_value = f(*args, **kwargs) + + if root_span.status is None: + root_span.set_status(SPANSTATUS.OK) + + return return_value return _inner # type: ignore @@ -359,6 +365,7 @@ def _inner(*args, **kwargs): op=OP.QUEUE_PROCESS, name=task.name, origin=CeleryIntegration.origin, + only_if_parent=True, ) as span: _set_messaging_destination_name(task, span) @@ -390,6 +397,7 @@ def _inner(*args, **kwargs): ) return f(*args, **kwargs) + except Exception: exc_info = sys.exc_info() with capture_internal_exceptions(): diff --git a/tests/integrations/celery/test_celery.py b/tests/integrations/celery/test_celery.py index 2f8de6968a..c9a110af9e 100644 --- a/tests/integrations/celery/test_celery.py +++ b/tests/integrations/celery/test_celery.py @@ -5,9 +5,11 @@ import pytest from celery import Celery, VERSION from celery.bin import worker +from celery.app.task import Task +from opentelemetry import trace as otel_trace, context import sentry_sdk -from sentry_sdk import start_span, get_current_span +from sentry_sdk import get_current_span from sentry_sdk.integrations.celery import ( CeleryIntegration, _wrap_task_run, @@ -126,14 +128,14 @@ def dummy_task(x, y): foo = 42 # noqa return x / y - with start_span(op="unit test transaction") as transaction: + with sentry_sdk.start_span(op="unit test transaction") as root_span: celery_invocation(dummy_task, 1, 2) _, expected_context = celery_invocation(dummy_task, 1, 0) (_, error_event, _, _) = events - assert error_event["contexts"]["trace"]["trace_id"] == 
transaction.trace_id - assert error_event["contexts"]["trace"]["span_id"] != transaction.span_id + assert error_event["contexts"]["trace"]["trace_id"] == root_span.trace_id + assert error_event["contexts"]["trace"]["span_id"] != root_span.span_id assert error_event["transaction"] == "dummy_task" assert "celery_task_id" in error_event["tags"] assert error_event["extra"]["celery-job"] == dict( @@ -190,17 +192,14 @@ def test_transaction_events(capture_events, init_celery, celery_invocation, task def dummy_task(x, y): return x / y - # XXX: For some reason the first call does not get instrumented properly. - celery_invocation(dummy_task, 1, 1) - events = capture_events() - with start_span(name="submission") as transaction: + with sentry_sdk.start_span(name="submission") as root_span: celery_invocation(dummy_task, 1, 0 if task_fails else 1) if task_fails: error_event = events.pop(0) - assert error_event["contexts"]["trace"]["trace_id"] == transaction.trace_id + assert error_event["contexts"]["trace"]["trace_id"] == root_span.trace_id assert error_event["exception"]["values"][0]["type"] == "ZeroDivisionError" execution_event, submission_event = events @@ -211,8 +210,8 @@ def dummy_task(x, y): assert submission_event["transaction_info"] == {"source": "custom"} assert execution_event["type"] == submission_event["type"] == "transaction" - assert execution_event["contexts"]["trace"]["trace_id"] == transaction.trace_id - assert submission_event["contexts"]["trace"]["trace_id"] == transaction.trace_id + assert execution_event["contexts"]["trace"]["trace_id"] == root_span.trace_id + assert submission_event["contexts"]["trace"]["trace_id"] == root_span.trace_id if task_fails: assert execution_event["contexts"]["trace"]["status"] == "internal_error" @@ -220,15 +219,12 @@ def dummy_task(x, y): assert execution_event["contexts"]["trace"]["status"] == "ok" assert len(execution_event["spans"]) == 1 - assert ( - execution_event["spans"][0].items() - >= { - "trace_id": str(transaction.trace_id), - "same_process_as_parent": True, + assert execution_event["spans"][0] == ApproxDict( + { + "trace_id": str(root_span.trace_id), "op": "queue.process", "description": "dummy_task", - "data": ApproxDict(), - }.items() + } ) assert submission_event["spans"] == [ { @@ -237,11 +233,14 @@ def dummy_task(x, y): "op": "queue.submit.celery", "origin": "auto.queue.celery", "parent_span_id": submission_event["contexts"]["trace"]["span_id"], - "same_process_as_parent": True, "span_id": submission_event["spans"][0]["span_id"], "start_timestamp": submission_event["spans"][0]["start_timestamp"], "timestamp": submission_event["spans"][0]["timestamp"], - "trace_id": str(transaction.trace_id), + "trace_id": str(root_span.trace_id), + "status": "ok", + "tags": { + "status": "ok", + }, } ] @@ -275,7 +274,7 @@ def test_simple_no_propagation(capture_events, init_celery): def dummy_task(): 1 / 0 - with start_span(name="task") as root_span: + with sentry_sdk.start_span(name="task") as root_span: dummy_task.delay() (event,) = events @@ -350,7 +349,7 @@ def dummy_task(self): runs.append(1) 1 / 0 - with start_span(name="submit_celery"): + with sentry_sdk.start_span(name="submit_celery"): # Curious: Cannot use delay() here or py2.7-celery-4.2 crashes res = dummy_task.apply_async() @@ -445,7 +444,7 @@ def walk_dogs(x, y): walk_dogs, [["Maisey", "Charlie", "Bodhi", "Cory"], "Dog park round trip"], 1 ) - sampling_context = traces_sampler.call_args_list[1][0][0] + sampling_context = traces_sampler.call_args_list[0][0][0] assert 
sampling_context["celery.job.task"] == "dog_walk" for i, arg in enumerate(args_kwargs["args"]): assert sampling_context[f"celery.job.args.{i}"] == str(arg) @@ -469,7 +468,7 @@ def __call__(self, *args, **kwargs): def dummy_task(x, y): return x / y - with start_span(name="celery"): + with sentry_sdk.start_span(name="celery"): celery_invocation(dummy_task, 1, 0) assert not events @@ -510,7 +509,7 @@ def test_baggage_propagation(init_celery): def dummy_task(self, x, y): return _get_headers(self) - with start_span(name="task") as root_span: + with sentry_sdk.start_span(name="task") as root_span: result = dummy_task.apply_async( args=(1, 0), headers={"baggage": "custom=value"}, @@ -520,6 +519,7 @@ def dummy_task(self, x, y): [ "sentry-release=abcdef", "sentry-trace_id={}".format(root_span.trace_id), + "sentry-transaction=task", "sentry-environment=production", "sentry-sample_rate=1.0", "sentry-sampled=true", @@ -537,26 +537,42 @@ def test_sentry_propagate_traces_override(init_celery): propagate_traces=True, traces_sample_rate=1.0, release="abcdef" ) + # Since we're applying the task inline eagerly, + # we need to cleanup the otel context for this test. + # and since we patch build_tracer, we need to do this before that runs... + # TODO: the right way is to not test this inline + original_apply = Task.apply + + def cleaned_apply(*args, **kwargs): + token = context.attach(otel_trace.set_span_in_context(otel_trace.INVALID_SPAN)) + rv = original_apply(*args, **kwargs) + context.detach(token) + return rv + + Task.apply = cleaned_apply + @celery.task(name="dummy_task", bind=True) def dummy_task(self, message): trace_id = get_current_span().trace_id return trace_id - with start_span(name="task") as root_span: - transaction_trace_id = root_span.trace_id + with sentry_sdk.start_span(name="task") as root_span: + root_span_trace_id = root_span.trace_id # should propagate trace - task_transaction_id = dummy_task.apply_async( + task_trace_id = dummy_task.apply_async( args=("some message",), ).get() - assert transaction_trace_id == task_transaction_id + assert root_span_trace_id == task_trace_id, "Trace should be propagated" # should NOT propagate trace (overrides `propagate_traces` parameter in integration constructor) - task_transaction_id = dummy_task.apply_async( + task_trace_id = dummy_task.apply_async( args=("another message",), headers={"sentry-propagate-traces": False}, ).get() - assert transaction_trace_id != task_transaction_id + assert root_span_trace_id != task_trace_id, "Trace should NOT be propagated" + + Task.apply = original_apply def test_apply_async_manually_span(sentry_init): @@ -710,7 +726,7 @@ def publish(*args, **kwargs): @celery.task() def task(): ... - with start_span(name="task"): + with sentry_sdk.start_span(name="task"): task.apply_async() (event,) = events @@ -773,7 +789,7 @@ def publish(*args, **kwargs): @celery.task() def task(): ... 
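    # A note on the only_if_parent flag this patch adds to the Celery queue
    # spans (the sketch below is illustrative, not part of the patch): such a
    # span is only recorded when a valid, non-remote parent span is already
    # active, so enqueues outside of any trace no longer create orphan spans.
    #
    #   with sentry_sdk.start_span(
    #       op=OP.QUEUE_SUBMIT_CELERY,
    #       name=task.name,
    #       origin=CeleryIntegration.origin,
    #       only_if_parent=True,
    #   ):
    #       task.apply_async()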
- with start_span(name="custom_transaction"): + with sentry_sdk.start_span(name="custom_transaction"): task.apply_async() (event,) = events From cc4afcc60eef55274b4a7077c7163bfdfd003e5a Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Wed, 15 Jan 2025 17:04:53 +0100 Subject: [PATCH 165/244] Remove 3.7 from celery matrix due to importlib-metadata clash --- tox.ini | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/tox.ini b/tox.ini index 729ba2fa0d..d381bcb246 100644 --- a/tox.ini +++ b/tox.ini @@ -72,9 +72,9 @@ envlist = {py3.7,py3.12,py3.13}-bottle-latest # Celery - {py3.7,py3.8}-celery-v{4} - {py3.7,py3.8}-celery-v{5.0} - {py3.7,py3.10}-celery-v{5.1,5.2} + {py3.8}-celery-v{4} + {py3.8}-celery-v{5.0} + {py3.8,py3.10}-celery-v{5.1,5.2} {py3.8,py3.11,py3.12}-celery-v{5.3,5.4,5.5} {py3.8,py3.12,py3.13}-celery-latest @@ -393,8 +393,6 @@ deps = celery-v5.5: Celery==5.5.0rc3 celery-latest: Celery - {py3.7}-celery: importlib-metadata<5.0 - # Chalice chalice: pytest-chalice==0.0.5 chalice-v1.16: chalice~=1.16.0 From cc2285ccfb0487193386b2d089c510c27fa23446 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Wed, 15 Jan 2025 17:11:15 +0100 Subject: [PATCH 166/244] Fix workflow files --- .github/workflows/test-integrations-flags.yml | 28 +++---------------- .github/workflows/test-integrations-misc.yml | 8 ------ 2 files changed, 4 insertions(+), 32 deletions(-) diff --git a/.github/workflows/test-integrations-flags.yml b/.github/workflows/test-integrations-flags.yml index 0460868473..8694bc40a9 100644 --- a/.github/workflows/test-integrations-flags.yml +++ b/.github/workflows/test-integrations-flags.yml @@ -30,11 +30,7 @@ jobs: fail-fast: false matrix: python-version: ["3.8","3.12","3.13"] - # python3.6 reached EOL and is no longer being supported on - # new versions of hosted runners on Github Actions - # ubuntu-20.04 is the last version that supported python3.6 - # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 - os: [ubuntu-20.04] + os: [ubuntu-22.04] steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 @@ -59,14 +55,8 @@ jobs: run: | set -x # print commands that are executed ./scripts/runtox.sh "py${{ matrix.python-version }}-unleash-latest" - - name: Generate coverage XML (Python 3.6) - if: ${{ !cancelled() && matrix.python-version == '3.6' }} - run: | - export COVERAGE_RCFILE=.coveragerc36 - coverage combine .coverage-sentry-* - coverage xml --ignore-errors - name: Generate coverage XML - if: ${{ !cancelled() && matrix.python-version != '3.6' }} + if: ${{ !cancelled() }} run: | coverage combine .coverage-sentry-* coverage xml @@ -94,11 +84,7 @@ jobs: fail-fast: false matrix: python-version: ["3.8","3.12","3.13"] - # python3.6 reached EOL and is no longer being supported on - # new versions of hosted runners on Github Actions - # ubuntu-20.04 is the last version that supported python3.6 - # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 - os: [ubuntu-20.04] + os: [ubuntu-22.04] steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 @@ -123,14 +109,8 @@ jobs: run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-unleash" - - name: Generate coverage XML (Python 3.6) - if: ${{ !cancelled() && matrix.python-version == '3.6' }} - run: | - export COVERAGE_RCFILE=.coveragerc36 - coverage combine .coverage-sentry-* - coverage xml --ignore-errors - name: Generate coverage XML - if: ${{ !cancelled() && matrix.python-version != 
'3.6' }} + if: ${{ !cancelled() }} run: | coverage combine .coverage-sentry-* coverage xml diff --git a/.github/workflows/test-integrations-misc.yml b/.github/workflows/test-integrations-misc.yml index 3a6dfdaa3f..013a21646c 100644 --- a/.github/workflows/test-integrations-misc.yml +++ b/.github/workflows/test-integrations-misc.yml @@ -67,10 +67,6 @@ jobs: run: | set -x # print commands that are executed ./scripts/runtox.sh "py${{ matrix.python-version }}-typer-latest" - - name: Test unleash latest - run: | - set -x # print commands that are executed - ./scripts/runtox.sh "py${{ matrix.python-version }}-unleash-latest" - name: Generate coverage XML if: ${{ !cancelled() }} run: | @@ -137,10 +133,6 @@ jobs: run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-typer" - - name: Test unleash pinned - run: | - set -x # print commands that are executed - ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-unleash" - name: Generate coverage XML if: ${{ !cancelled() }} run: | From 2cb11a7ad03cdd61300d6d8fb8a3f6d3d5167621 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Wed, 15 Jan 2025 17:15:27 +0100 Subject: [PATCH 167/244] Fix merge --- tests/tracing/test_integration_tests.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/tracing/test_integration_tests.py b/tests/tracing/test_integration_tests.py index f44e0cb068..975776e8fb 100644 --- a/tests/tracing/test_integration_tests.py +++ b/tests/tracing/test_integration_tests.py @@ -121,7 +121,7 @@ def test_continue_trace( def test_propagate_traces_deprecation_warning(sentry_init, sample_rate): sentry_init(traces_sample_rate=sample_rate, propagate_traces=False) - with start_transaction(name="hi"): + with start_span(name="hi"): with start_span() as old_span: with pytest.warns(DeprecationWarning): dict( From 7d728f93b1c00262e7181eb102e771cb070e854a Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Thu, 16 Jan 2025 15:28:38 +0100 Subject: [PATCH 168/244] Fix flake8 --- tests/conftest.py | 18 +++++++----------- tests/integrations/httpx/test_httpx.py | 7 +++---- .../opentelemetry/test_propagator.py | 7 ++++--- tests/integrations/rq/test_rq.py | 2 +- .../rust_tracing/test_rust_tracing.py | 1 - tests/integrations/stdlib/test_httplib.py | 11 +++++------ tests/test_api.py | 5 +++-- tests/tracing/test_integration_tests.py | 4 ++-- 8 files changed, 25 insertions(+), 30 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index d067e04eb5..570acb37e9 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -660,16 +660,12 @@ def __ne__(self, other): return not self.__eq__(other) -@pytest.fixture(name="SortedBaggage") -def sorted_baggage_matcher(): - class SortedBaggage: - def __init__(self, baggage): - self.baggage = baggage +class SortedBaggage: + def __init__(self, baggage): + self.baggage = baggage - def __eq__(self, other): - return sorted(self.baggage.split(",")) == sorted(other.split(",")) - - def __ne__(self, other): - return not self.__eq__(other) + def __eq__(self, other): + return sorted(self.baggage.split(",")) == sorted(other.split(",")) - return SortedBaggage + def __ne__(self, other): + return not self.__eq__(other) diff --git a/tests/integrations/httpx/test_httpx.py b/tests/integrations/httpx/test_httpx.py index 9890d1f0cc..f0dc410b14 100644 --- a/tests/integrations/httpx/test_httpx.py +++ b/tests/integrations/httpx/test_httpx.py @@ -9,7 +9,7 @@ from sentry_sdk import capture_message, start_span from sentry_sdk.consts import MATCH_ALL, 
SPANDATA from sentry_sdk.integrations.httpx import HttpxIntegration -from tests.conftest import ApproxDict +from tests.conftest import ApproxDict, SortedBaggage @pytest.mark.parametrize( @@ -75,7 +75,7 @@ def test_outgoing_trace_headers(sentry_init, httpx_client, capture_envelopes): with start_span( name="/interactions/other-dogs/new-dog", op="greeting.sniff", - ) as span: + ): if asyncio.iscoroutinefunction(httpx_client.get): response = asyncio.get_event_loop().run_until_complete( httpx_client.get(url) @@ -104,7 +104,6 @@ def test_outgoing_trace_headers_append_to_baggage( sentry_init, httpx_client, capture_envelopes, - SortedBaggage, # noqa: N803 ): sentry_init( traces_sample_rate=1.0, @@ -141,7 +140,7 @@ def test_outgoing_trace_headers_append_to_baggage( sampled=1, ) assert response.request.headers["baggage"] == SortedBaggage( - f"custom=data,sentry-trace_id={trace_id},sentry-environment=production,sentry-release=d08ebdb9309e1b004c6f52202de58a09c2268e42,sentry-transaction=/interactions/other-dogs/new-dog,sentry-sample_rate=1.0,sentry-sampled=true" + f"custom=data,sentry-trace_id={trace_id},sentry-environment=production,sentry-release=d08ebdb9309e1b004c6f52202de58a09c2268e42,sentry-transaction=/interactions/other-dogs/new-dog,sentry-sample_rate=1.0,sentry-sampled=true" # noqa: E231 ) diff --git a/tests/integrations/opentelemetry/test_propagator.py b/tests/integrations/opentelemetry/test_propagator.py index ef952ea50a..46f4250771 100644 --- a/tests/integrations/opentelemetry/test_propagator.py +++ b/tests/integrations/opentelemetry/test_propagator.py @@ -11,6 +11,7 @@ SENTRY_TRACE_KEY, ) from sentry_sdk.integrations.opentelemetry.propagator import SentryPropagator +from tests.conftest import SortedBaggage @pytest.mark.forked @@ -116,7 +117,7 @@ def test_extract_context_sentry_trace_header_baggage(): assert span_context.trace_id == int("1234567890abcdef1234567890abcdef", 16) -def test_inject_continue_trace(sentry_init, SortedBaggage): +def test_inject_continue_trace(sentry_init): sentry_init(traces_sample_rate=1.0) carrier = {} @@ -145,7 +146,7 @@ def test_inject_continue_trace(sentry_init, SortedBaggage): assert (carrier["baggage"]) == SortedBaggage(baggage) -def test_inject_head_sdk(sentry_init, SortedBaggage): +def test_inject_head_sdk(sentry_init): sentry_init(traces_sample_rate=1.0, release="release") carrier = {} @@ -155,5 +156,5 @@ def test_inject_head_sdk(sentry_init, SortedBaggage): SentryPropagator().inject(carrier, setter=setter) assert (carrier["sentry-trace"]) == f"{span.trace_id}-{span.span_id}-1" assert (carrier["baggage"]) == SortedBaggage( - f"sentry-transaction=foo,sentry-release=release,sentry-environment=production,sentry-trace_id={span.trace_id},sentry-sample_rate=1.0,sentry-sampled=true" + f"sentry-transaction=foo,sentry-release=release,sentry-environment=production,sentry-trace_id={span.trace_id},sentry-sample_rate=1.0,sentry-sampled=true" # noqa: E231 ) diff --git a/tests/integrations/rq/test_rq.py b/tests/integrations/rq/test_rq.py index c7eeb377e6..ce0c869c2d 100644 --- a/tests/integrations/rq/test_rq.py +++ b/tests/integrations/rq/test_rq.py @@ -150,7 +150,7 @@ def test_error_has_trace_context_if_tracing_disabled( assert error_event["contexts"]["trace"] -def test_tracing_enabled(sentry_init, capture_events, DictionaryContaining): +def test_tracing_enabled(sentry_init, capture_events, DictionaryContaining): # noqa: N803 sentry_init(integrations=[RqIntegration()], traces_sample_rate=1.0) events = capture_events() diff --git 
a/tests/integrations/rust_tracing/test_rust_tracing.py b/tests/integrations/rust_tracing/test_rust_tracing.py index 77f07649b2..dc7ee86617 100644 --- a/tests/integrations/rust_tracing/test_rust_tracing.py +++ b/tests/integrations/rust_tracing/test_rust_tracing.py @@ -121,7 +121,6 @@ def test_nested_on_new_span_on_close(sentry_init, capture_events): rust_tracing.new_span(RustTracingLevel.Info, 3, index_arg=10) sentry_first_rust_span = sentry_sdk.get_current_span() - rust_first_rust_span = rust_tracing.spans[3] # Use a different `index_arg` value for the inner span to help # distinguish the two at the end of the test diff --git a/tests/integrations/stdlib/test_httplib.py b/tests/integrations/stdlib/test_httplib.py index 642c707268..cebd8a3d8a 100644 --- a/tests/integrations/stdlib/test_httplib.py +++ b/tests/integrations/stdlib/test_httplib.py @@ -8,10 +8,9 @@ from sentry_sdk import capture_message, start_span, continue_trace, isolation_scope from sentry_sdk.consts import MATCH_ALL, SPANDATA -from sentry_sdk.tracing import Transaction from sentry_sdk.integrations.stdlib import StdlibIntegration -from tests.conftest import ApproxDict, create_mock_http_server +from tests.conftest import ApproxDict, SortedBaggage, create_mock_http_server PORT = create_mock_http_server() @@ -157,7 +156,7 @@ def test_httplib_misuse(sentry_init, capture_events, request): def test_outgoing_trace_headers( - sentry_init, capture_envelopes, capture_request_headers, SortedBaggage + sentry_init, capture_envelopes, capture_request_headers ): sentry_init(traces_sample_rate=1.0) envelopes = capture_envelopes() @@ -202,7 +201,7 @@ def test_outgoing_trace_headers( def test_outgoing_trace_headers_head_sdk( - sentry_init, monkeypatch, capture_request_headers, capture_envelopes, SortedBaggage + sentry_init, monkeypatch, capture_request_headers, capture_envelopes ): # make sure transaction is always sampled monkeypatch.setattr(random, "random", lambda: 0.1) @@ -230,7 +229,7 @@ def test_outgoing_trace_headers_head_sdk( assert request_headers["sentry-trace"] == expected_sentry_trace expected_outgoing_baggage = ( - f"sentry-trace_id={root_span.trace_id}," + f"sentry-trace_id={root_span.trace_id}," # noqa: E231 "sentry-environment=production," "sentry-release=foo," "sentry-sample_rate=0.5," @@ -372,4 +371,4 @@ def test_http_timeout(monkeypatch, sentry_init, capture_envelopes): span = transaction["spans"][0] assert span["op"] == "http.client" - assert span["description"] == f"GET http://localhost:{PORT}/top-chasers" + assert span["description"] == f"GET http://localhost:{PORT}/top-chasers" # noqa: E231 diff --git a/tests/test_api.py b/tests/test_api.py index 6c81c93d21..40bc673dda 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -17,6 +17,7 @@ ) from sentry_sdk.client import Client, NonRecordingClient +from tests.conftest import SortedBaggage @pytest.mark.forked @@ -77,7 +78,7 @@ def test_traceparent_with_tracing_disabled(sentry_init): @pytest.mark.forked -def test_baggage_with_tracing_disabled(sentry_init, SortedBaggage): +def test_baggage_with_tracing_disabled(sentry_init): sentry_init(release="1.0.0", environment="dev") propagation_context = get_isolation_scope()._propagation_context expected_baggage = ( @@ -89,7 +90,7 @@ def test_baggage_with_tracing_disabled(sentry_init, SortedBaggage): @pytest.mark.forked -def test_baggage_with_tracing_enabled(sentry_init, SortedBaggage): +def test_baggage_with_tracing_enabled(sentry_init): sentry_init(traces_sample_rate=1.0, release="1.0.0", environment="dev") with 
start_span(name="foo") as span: expected_baggage = "sentry-transaction=foo,sentry-trace_id={},sentry-environment=dev,sentry-release=1.0.0,sentry-sample_rate=1.0,sentry-sampled={}".format( diff --git a/tests/tracing/test_integration_tests.py b/tests/tracing/test_integration_tests.py index 975776e8fb..88c0d83b12 100644 --- a/tests/tracing/test_integration_tests.py +++ b/tests/tracing/test_integration_tests.py @@ -10,6 +10,7 @@ ) from sentry_sdk.consts import SPANSTATUS from sentry_sdk.transport import Transport +from tests.conftest import SortedBaggage @pytest.mark.parametrize("sample_rate", [0.0, 1.0]) @@ -51,7 +52,7 @@ def test_basic(sentry_init, capture_events, sample_rate): @pytest.mark.parametrize("sample_rate", [0.0, 1.0]) def test_continue_trace( - sentry_init, capture_envelopes, sample_rate, SortedBaggage + sentry_init, capture_envelopes, sample_rate ): # noqa:N803 """ Ensure data is actually passed along via headers, and that they are read @@ -137,7 +138,6 @@ def test_dynamic_sampling_head_sdk_creates_dsc( capture_envelopes, sample_rate, monkeypatch, - SortedBaggage, # noqa: N803 ): sentry_init(traces_sample_rate=sample_rate, release="foo") envelopes = capture_envelopes() From 2a6112876be52742483e15eb79ed091ac6fa36e8 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Mon, 20 Jan 2025 12:18:36 +0100 Subject: [PATCH 169/244] lint again --- tests/integrations/rq/test_rq.py | 4 +++- tests/integrations/stdlib/test_httplib.py | 4 +++- tests/tracing/test_integration_tests.py | 4 +--- 3 files changed, 7 insertions(+), 5 deletions(-) diff --git a/tests/integrations/rq/test_rq.py b/tests/integrations/rq/test_rq.py index ce0c869c2d..a57a3d0dec 100644 --- a/tests/integrations/rq/test_rq.py +++ b/tests/integrations/rq/test_rq.py @@ -150,7 +150,9 @@ def test_error_has_trace_context_if_tracing_disabled( assert error_event["contexts"]["trace"] -def test_tracing_enabled(sentry_init, capture_events, DictionaryContaining): # noqa: N803 +def test_tracing_enabled( + sentry_init, capture_events, DictionaryContaining # noqa: N803 +): sentry_init(integrations=[RqIntegration()], traces_sample_rate=1.0) events = capture_events() diff --git a/tests/integrations/stdlib/test_httplib.py b/tests/integrations/stdlib/test_httplib.py index cebd8a3d8a..dfd3c8d085 100644 --- a/tests/integrations/stdlib/test_httplib.py +++ b/tests/integrations/stdlib/test_httplib.py @@ -371,4 +371,6 @@ def test_http_timeout(monkeypatch, sentry_init, capture_envelopes): span = transaction["spans"][0] assert span["op"] == "http.client" - assert span["description"] == f"GET http://localhost:{PORT}/top-chasers" # noqa: E231 + assert ( + span["description"] == f"GET http://localhost:{PORT}/top-chasers" # noqa: E231 + ) diff --git a/tests/tracing/test_integration_tests.py b/tests/tracing/test_integration_tests.py index 88c0d83b12..e2a76a6d98 100644 --- a/tests/tracing/test_integration_tests.py +++ b/tests/tracing/test_integration_tests.py @@ -51,9 +51,7 @@ def test_basic(sentry_init, capture_events, sample_rate): @pytest.mark.parametrize("sample_rate", [0.0, 1.0]) -def test_continue_trace( - sentry_init, capture_envelopes, sample_rate -): # noqa:N803 +def test_continue_trace(sentry_init, capture_envelopes, sample_rate): # noqa:N803 """ Ensure data is actually passed along via headers, and that they are read correctly. 
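One note before the next patch: the `SortedBaggage` helper that the flake8 cleanup above turned from a pytest fixture into a plain class in `tests/conftest.py` makes baggage-header assertions order-insensitive. A minimal usage sketch (header values are illustrative only):

    from tests.conftest import SortedBaggage

    # Same entries, different order: __eq__ sorts the comma-separated items
    # on both sides before comparing, so this assertion passes.
    assert "sentry-release=abc,custom=data" == SortedBaggage("custom=data,sentry-release=abc")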
From 6754cf25436311c435168d7263c23d15bd489056 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Mon, 20 Jan 2025 12:42:40 +0100 Subject: [PATCH 170/244] Split out gevent tests (#3962) --- .../workflows/test-integrations-gevent.yml | 81 +++++++++++++++++++ .../workflows/test-integrations-network.yml | 8 -- .../split_tox_gh_actions.py | 4 +- 3 files changed, 84 insertions(+), 9 deletions(-) create mode 100644 .github/workflows/test-integrations-gevent.yml diff --git a/.github/workflows/test-integrations-gevent.yml b/.github/workflows/test-integrations-gevent.yml new file mode 100644 index 0000000000..2517c18cce --- /dev/null +++ b/.github/workflows/test-integrations-gevent.yml @@ -0,0 +1,81 @@ +# Do not edit this YAML file. This file is generated automatically by executing +# python scripts/split_tox_gh_actions/split_tox_gh_actions.py +# The template responsible for it is in +# scripts/split_tox_gh_actions/templates/base.jinja +name: Test Gevent +on: + push: + branches: + - master + - release/** + - potel-base + pull_request: +# Cancel in progress workflows on pull_requests. +# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true +permissions: + contents: read +env: + BUILD_CACHE_KEY: ${{ github.sha }} + CACHED_BUILD_PATHS: | + ${{ github.workspace }}/dist-serverless +jobs: + test-gevent-pinned: + name: Gevent (pinned) + timeout-minutes: 30 + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + python-version: ["3.8","3.10","3.11","3.12"] + os: [ubuntu-22.04] + steps: + - uses: actions/checkout@v4.2.2 + - uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + allow-prereleases: true + - name: Setup Test Env + run: | + pip install "coverage[toml]" tox + - name: Erase coverage + run: | + coverage erase + - name: Test gevent pinned + run: | + set -x # print commands that are executed + ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-gevent" + - name: Generate coverage XML + if: ${{ !cancelled() }} + run: | + coverage combine .coverage-sentry-* + coverage xml + - name: Upload coverage to Codecov + if: ${{ !cancelled() }} + uses: codecov/codecov-action@v5.1.2 + with: + token: ${{ secrets.CODECOV_TOKEN }} + files: coverage.xml + # make sure no plugins alter our coverage reports + plugin: noop + verbose: true + - name: Upload test results to Codecov + if: ${{ !cancelled() }} + uses: codecov/test-results-action@v1 + with: + token: ${{ secrets.CODECOV_TOKEN }} + files: .junitxml + verbose: true + check_required_tests: + name: All pinned Gevent tests passed + needs: test-gevent-pinned + # Always run this, even if a dependent job failed + if: always() + runs-on: ubuntu-20.04 + steps: + - name: Check for failures + if: contains(needs.test-gevent-pinned.result, 'failure') || contains(needs.test-gevent-pinned.result, 'skipped') + run: | + echo "One of the dependent jobs has failed. You may need to re-run it." 
&& exit 1 diff --git a/.github/workflows/test-integrations-network.yml b/.github/workflows/test-integrations-network.yml index de02b30d8c..a6cafb2b66 100644 --- a/.github/workflows/test-integrations-network.yml +++ b/.github/workflows/test-integrations-network.yml @@ -43,10 +43,6 @@ jobs: - name: Erase coverage run: | coverage erase - - name: Test gevent latest - run: | - set -x # print commands that are executed - ./scripts/runtox.sh "py${{ matrix.python-version }}-gevent-latest" - name: Test grpc latest run: | set -x # print commands that are executed @@ -101,10 +97,6 @@ jobs: - name: Erase coverage run: | coverage erase - - name: Test gevent pinned - run: | - set -x # print commands that are executed - ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-gevent" - name: Test grpc pinned run: | set -x # print commands that are executed diff --git a/scripts/split_tox_gh_actions/split_tox_gh_actions.py b/scripts/split_tox_gh_actions/split_tox_gh_actions.py index 1537ad8389..4effe0b1ba 100755 --- a/scripts/split_tox_gh_actions/split_tox_gh_actions.py +++ b/scripts/split_tox_gh_actions/split_tox_gh_actions.py @@ -58,6 +58,9 @@ "Common": [ "common", ], + "Gevent": [ + "gevent", + ], "AI": [ "anthropic", "cohere", @@ -96,7 +99,6 @@ "strawberry", ], "Network": [ - "gevent", "grpc", "httpx", "requests", From 27002906565bdeb6a0c7c7b3de412bad0a252487 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Mon, 20 Jan 2025 13:04:07 +0100 Subject: [PATCH 171/244] Use httpx_mock in test_httpx --- tests/integrations/httpx/test_httpx.py | 24 +++++++++++++++--------- 1 file changed, 15 insertions(+), 9 deletions(-) diff --git a/tests/integrations/httpx/test_httpx.py b/tests/integrations/httpx/test_httpx.py index f0dc410b14..a8a509152f 100644 --- a/tests/integrations/httpx/test_httpx.py +++ b/tests/integrations/httpx/test_httpx.py @@ -16,7 +16,9 @@ "httpx_client", (httpx.Client(), httpx.AsyncClient()), ) -def test_crumb_capture_and_hint(sentry_init, capture_events, httpx_client): +def test_crumb_capture_and_hint(sentry_init, capture_events, httpx_client, httpx_mock): + httpx_mock.add_response() + def before_breadcrumb(crumb, hint): crumb["data"]["extra"] = "foo" return crumb @@ -24,7 +26,6 @@ def before_breadcrumb(crumb, hint): sentry_init(integrations=[HttpxIntegration()], before_breadcrumb=before_breadcrumb) url = "http://example.com/" - responses.add(responses.GET, url, status=200) with start_span(): events = capture_events() @@ -61,7 +62,9 @@ def before_breadcrumb(crumb, hint): "httpx_client", (httpx.Client(), httpx.AsyncClient()), ) -def test_outgoing_trace_headers(sentry_init, httpx_client, capture_envelopes): +def test_outgoing_trace_headers(sentry_init, httpx_client, capture_envelopes, httpx_mock): + httpx_mock.add_response() + sentry_init( traces_sample_rate=1.0, integrations=[HttpxIntegration()], @@ -70,7 +73,6 @@ def test_outgoing_trace_headers(sentry_init, httpx_client, capture_envelopes): envelopes = capture_envelopes() url = "http://example.com/" - responses.add(responses.GET, url, status=200) with start_span( name="/interactions/other-dogs/new-dog", @@ -104,7 +106,10 @@ def test_outgoing_trace_headers_append_to_baggage( sentry_init, httpx_client, capture_envelopes, + httpx_mock, ): + httpx_mock.add_response() + sentry_init( traces_sample_rate=1.0, integrations=[HttpxIntegration()], @@ -114,7 +119,6 @@ def test_outgoing_trace_headers_append_to_baggage( envelopes = capture_envelopes() url = "http://example.com/" - responses.add(responses.GET, url, status=200) with start_span( 
name="/interactions/other-dogs/new-dog", @@ -306,12 +310,13 @@ def test_propagates_twp_outside_root_span(sentry_init, httpx_mock): @pytest.mark.tests_internal_exceptions -def test_omit_url_data_if_parsing_fails(sentry_init, capture_events): +def test_omit_url_data_if_parsing_fails(sentry_init, capture_events, httpx_mock): + httpx_mock.add_response() + sentry_init(integrations=[HttpxIntegration()]) httpx_client = httpx.Client() url = "http://example.com" - responses.add(responses.GET, url, status=200) events = capture_events() with mock.patch( @@ -342,7 +347,9 @@ def test_omit_url_data_if_parsing_fails(sentry_init, capture_events): "httpx_client", (httpx.Client(), httpx.AsyncClient()), ) -def test_span_origin(sentry_init, capture_events, httpx_client): +def test_span_origin(sentry_init, capture_events, httpx_client, httpx_mock): + httpx_mock.add_response() + sentry_init( integrations=[HttpxIntegration()], traces_sample_rate=1.0, @@ -351,7 +358,6 @@ def test_span_origin(sentry_init, capture_events, httpx_client): events = capture_events() url = "http://example.com/" - responses.add(responses.GET, url, status=200) with start_span(name="test_root_span"): if asyncio.iscoroutinefunction(httpx_client.get): From 72620d10a50acb647080716bf5c873894827b47d Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Mon, 20 Jan 2025 14:15:53 +0100 Subject: [PATCH 172/244] Replace old Span/Transaction completely with POTelSpan (#3966) * `POTelSpan` no longer exists * Old `Span/Transaction` both point to new `Span` * `NoOpSpan` is still around to enable future potential errors-only stuff closes #3968 --- sentry_sdk/ai/monitoring.py | 2 +- sentry_sdk/ai/utils.py | 2 +- sentry_sdk/api.py | 8 +- sentry_sdk/integrations/langchain.py | 2 +- .../integrations/opentelemetry/scope.py | 8 +- .../opentelemetry/span_processor.py | 2 +- sentry_sdk/integrations/rust_tracing.py | 10 +- sentry_sdk/integrations/wsgi.py | 4 +- sentry_sdk/scope.py | 23 +- sentry_sdk/tracing.py | 960 +----------------- sentry_sdk/tracing_utils.py | 6 - tests/integrations/asyncpg/test_asyncpg.py | 4 +- tests/integrations/httpx/test_httpx.py | 5 +- .../integrations/opentelemetry/test_compat.py | 4 +- tests/tracing/test_http_headers.py | 56 - 15 files changed, 60 insertions(+), 1036 deletions(-) delete mode 100644 tests/tracing/test_http_headers.py diff --git a/sentry_sdk/ai/monitoring.py b/sentry_sdk/ai/monitoring.py index e826f3bf90..e149ebe7df 100644 --- a/sentry_sdk/ai/monitoring.py +++ b/sentry_sdk/ai/monitoring.py @@ -3,7 +3,7 @@ import sentry_sdk.utils from sentry_sdk import start_span -from sentry_sdk.tracing import POTelSpan as Span +from sentry_sdk.tracing import Span from sentry_sdk.utils import ContextVar from typing import TYPE_CHECKING diff --git a/sentry_sdk/ai/utils.py b/sentry_sdk/ai/utils.py index 4a972071a9..ed3494f679 100644 --- a/sentry_sdk/ai/utils.py +++ b/sentry_sdk/ai/utils.py @@ -3,7 +3,7 @@ if TYPE_CHECKING: from typing import Any -from sentry_sdk.tracing import POTelSpan as Span +from sentry_sdk.tracing import Span from sentry_sdk.utils import logger diff --git a/sentry_sdk/api.py b/sentry_sdk/api.py index a44d3f440e..86577cc500 100644 --- a/sentry_sdk/api.py +++ b/sentry_sdk/api.py @@ -3,7 +3,7 @@ from sentry_sdk import tracing_utils, Client from sentry_sdk._init_implementation import init -from sentry_sdk.tracing import POTelSpan, Transaction, trace +from sentry_sdk.tracing import trace from sentry_sdk.crons import monitor # TODO-neel-potel make 2 scope strategies/impls and switch @@ -239,7 +239,7 @@ def flush( def 
start_span(**kwargs): - # type: (type.Any) -> POTelSpan + # type: (type.Any) -> Span """ Start and return a span. @@ -256,10 +256,10 @@ def start_span(**kwargs): def start_transaction( - transaction=None, # type: Optional[Transaction] + transaction=None, # type: Optional[Span] **kwargs, # type: Unpack[TransactionKwargs] ): - # type: (...) -> POTelSpan + # type: (...) -> Span """ .. deprecated:: 3.0.0 This function is deprecated and will be removed in a future release. diff --git a/sentry_sdk/integrations/langchain.py b/sentry_sdk/integrations/langchain.py index deb700bde2..c775f9d92b 100644 --- a/sentry_sdk/integrations/langchain.py +++ b/sentry_sdk/integrations/langchain.py @@ -6,7 +6,7 @@ from sentry_sdk.consts import OP, SPANDATA, SPANSTATUS from sentry_sdk.ai.utils import set_data_normalized from sentry_sdk.scope import should_send_default_pii -from sentry_sdk.tracing import POTelSpan as Span +from sentry_sdk.tracing import Span from sentry_sdk.integrations import DidNotEnable, Integration from sentry_sdk.utils import logger, capture_internal_exceptions diff --git a/sentry_sdk/integrations/opentelemetry/scope.py b/sentry_sdk/integrations/opentelemetry/scope.py index 89da1af68c..d16215ab20 100644 --- a/sentry_sdk/integrations/opentelemetry/scope.py +++ b/sentry_sdk/integrations/opentelemetry/scope.py @@ -28,7 +28,7 @@ ) from sentry_sdk.integrations.opentelemetry.utils import trace_state_from_baggage from sentry_sdk.scope import Scope, ScopeType -from sentry_sdk.tracing import POTelSpan +from sentry_sdk.tracing import Span from sentry_sdk._types import TYPE_CHECKING if TYPE_CHECKING: @@ -128,7 +128,7 @@ def _incoming_otel_span_context(self): return span_context def start_transaction(self, **kwargs): - # type: (Unpack[TransactionKwargs]) -> POTelSpan + # type: (Unpack[TransactionKwargs]) -> Span """ .. deprecated:: 3.0.0 This function is deprecated and will be removed in a future release. @@ -137,8 +137,8 @@ def start_transaction(self, **kwargs): return self.start_span(**kwargs) def start_span(self, **kwargs): - # type: (Any) -> POTelSpan - return POTelSpan(**kwargs, scope=self) + # type: (Any) -> Span + return Span(**kwargs, scope=self) _INITIAL_CURRENT_SCOPE = None diff --git a/sentry_sdk/integrations/opentelemetry/span_processor.py b/sentry_sdk/integrations/opentelemetry/span_processor.py index 8d513ec97d..a3cf545daf 100644 --- a/sentry_sdk/integrations/opentelemetry/span_processor.py +++ b/sentry_sdk/integrations/opentelemetry/span_processor.py @@ -98,7 +98,7 @@ def force_flush(self, timeout_millis=30000): def _add_root_span(self, span, parent_span): # type: (Span, AbstractSpan) -> None """ - This is required to make POTelSpan.root_span work + This is required to make Span.root_span work since we can't traverse back to the root purely with otel efficiently. 
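        Conceptually (illustrative sketch only; the method body is not shown in
        this hunk), the processor remembers each span's root at start time so
        that `root_span` becomes a cheap lookup instead of a walk over OTel
        parent pointers:

            # hypothetical mapping, not the SDK's actual bookkeeping
            roots = {}  # started span -> its root span

            def remember_root(span, parent_span):
                parent_ctx = parent_span.get_span_context()
                if parent_ctx.is_valid and not parent_ctx.is_remote:
                    roots[span] = roots.get(parent_span, parent_span)
                else:
                    roots[span] = span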
""" if parent_span != INVALID_SPAN and not parent_span.get_span_context().is_remote: diff --git a/sentry_sdk/integrations/rust_tracing.py b/sentry_sdk/integrations/rust_tracing.py index ccf4e5aae0..9b5a83197e 100644 --- a/sentry_sdk/integrations/rust_tracing.py +++ b/sentry_sdk/integrations/rust_tracing.py @@ -37,7 +37,7 @@ import sentry_sdk from sentry_sdk.integrations import Integration from sentry_sdk.scope import should_send_default_pii -from sentry_sdk.tracing import POTelSpan as SentrySpan +from sentry_sdk.tracing import Span from sentry_sdk.utils import SENSITIVE_DATA_SUBSTITUTE @@ -169,7 +169,7 @@ def _include_tracing_fields(self) -> bool: else self.include_tracing_fields ) - def on_event(self, event: str, _span_state: Optional[SentrySpan]) -> None: + def on_event(self, event: str, _span_state: Optional[Span]) -> None: deserialized_event = json.loads(event) metadata = deserialized_event.get("metadata", {}) @@ -183,7 +183,7 @@ def on_event(self, event: str, _span_state: Optional[SentrySpan]) -> None: elif event_type == EventTypeMapping.Event: process_event(deserialized_event) - def on_new_span(self, attrs: str, span_id: str) -> Optional[SentrySpan]: + def on_new_span(self, attrs: str, span_id: str) -> Optional[Span]: attrs = json.loads(attrs) metadata = attrs.get("metadata", {}) @@ -220,11 +220,11 @@ def on_new_span(self, attrs: str, span_id: str) -> Optional[SentrySpan]: return span - def on_close(self, span_id: str, span: Optional[SentrySpan]) -> None: + def on_close(self, span_id: str, span: Optional[Span]) -> None: if span is not None: span.__exit__(None, None, None) - def on_record(self, span_id: str, values: str, span: Optional[SentrySpan]) -> None: + def on_record(self, span_id: str, values: str, span: Optional[Span]) -> None: if span is not None: deserialized_values = json.loads(values) for key, value in deserialized_values.items(): diff --git a/sentry_sdk/integrations/wsgi.py b/sentry_sdk/integrations/wsgi.py index 7f7360a341..e9cc65d716 100644 --- a/sentry_sdk/integrations/wsgi.py +++ b/sentry_sdk/integrations/wsgi.py @@ -12,7 +12,7 @@ _request_headers_to_span_attributes, ) from sentry_sdk.sessions import track_session -from sentry_sdk.tracing import Transaction, TRANSACTION_SOURCE_ROUTE +from sentry_sdk.tracing import Span, TRANSACTION_SOURCE_ROUTE from sentry_sdk.utils import ( ContextVar, capture_internal_exceptions, @@ -157,7 +157,7 @@ def __call__(self, environ, start_response): def _sentry_start_response( # type: ignore old_start_response, # type: StartResponse - transaction, # type: Optional[Transaction] + transaction, # type: Optional[Span] status, # type: str response_headers, # type: WsgiResponseHeaders exc_info=None, # type: Optional[WsgiExcInfo] diff --git a/sentry_sdk/scope.py b/sentry_sdk/scope.py index 09595f88d2..af69aca4ee 100644 --- a/sentry_sdk/scope.py +++ b/sentry_sdk/scope.py @@ -26,8 +26,6 @@ SENTRY_TRACE_HEADER_NAME, NoOpSpan, Span, - POTelSpan, - Transaction, ) from sentry_sdk.utils import ( capture_internal_exception, @@ -677,7 +675,7 @@ def clear(self): self.clear_breadcrumbs() self._should_capture = True # type: bool - self._span = None # type: Optional[POTelSpan] + self._span = None # type: Optional[Span] self._session = None # type: Optional[Session] self._force_auto_session_tracking = None # type: Optional[bool] @@ -707,7 +705,7 @@ def fingerprint(self, value): @property def transaction(self): # type: () -> Any - # would be type: () -> Optional[Transaction], see https://github.com/python/mypy/issues/3004 + # would be type: () -> 
Optional[Span], see https://github.com/python/mypy/issues/3004 """Return the transaction (root span) in the scope, if any.""" # there is no span/transaction on the scope @@ -734,7 +732,7 @@ def transaction(self, value): # anything set in the scope. # XXX: note that with the introduction of the Scope.transaction getter, # there is a semantic and type mismatch between getter and setter. The - # getter returns a Transaction, the setter sets a transaction name. + # getter returns a Span, the setter sets a transaction name. # Without breaking version compatibility, we could make the setter set a # transaction name or transaction (self._span) depending on the type of # the value argument. @@ -785,13 +783,13 @@ def set_user(self, value): @property def span(self): - # type: () -> Optional[POTelSpan] + # type: () -> Optional[Span] """Get current tracing span.""" return self._span @span.setter def span(self, span): - # type: (Optional[POTelSpan]) -> None + # type: (Optional[Span]) -> None """Set current tracing span.""" self._span = span @@ -952,7 +950,7 @@ def add_breadcrumb(self, crumb=None, hint=None, **kwargs): self._breadcrumbs.popleft() def start_transaction(self, transaction=None, **kwargs): - # type: (Optional[Transaction], Optional[SamplingContext], Unpack[TransactionKwargs]) -> Union[Transaction, NoOpSpan] + # type: (Optional[Span], Optional[SamplingContext], Unpack[TransactionKwargs]) -> Union[Span, NoOpSpan] """ Start and return a transaction. @@ -981,6 +979,7 @@ def start_transaction(self, transaction=None, **kwargs): constructor. See :py:class:`sentry_sdk.tracing.Transaction` for available arguments. """ + # TODO-neel-potel fix signature and no op kwargs.setdefault("scope", self) client = self.get_client() @@ -988,7 +987,7 @@ def start_transaction(self, transaction=None, **kwargs): try_autostart_continuous_profiler() # if we haven't been given a transaction, make one - transaction = Transaction(**kwargs) + transaction = Span(**kwargs) # use traces_sample_rate, traces_sampler, and/or inheritance to make a # sampling decision @@ -1024,6 +1023,7 @@ def start_span(self, **kwargs): For supported `**kwargs` see :py:class:`sentry_sdk.tracing.Span`. """ + # TODO-neel-potel fix signature and no op if kwargs.get("description") is not None: warnings.warn( "The `description` parameter is deprecated. Please use `name` instead.", @@ -1054,13 +1054,14 @@ def start_span(self, **kwargs): def continue_trace( self, environ_or_headers, op=None, name=None, source=None, origin=None ): - # type: (Dict[str, Any], Optional[str], Optional[str], Optional[str], Optional[str]) -> Transaction + # TODO-neel-potel fix signature and no op + # type: (Dict[str, Any], Optional[str], Optional[str], Optional[str], Optional[str]) -> Span """ Sets the propagation context from environment or headers and returns a transaction. 
""" self.generate_propagation_context(environ_or_headers) - transaction = Transaction.continue_from_headers( + transaction = Span.continue_from_headers( normalize_incoming_data(environ_or_headers), op=op, origin=origin, diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 3ee155aedb..e67301e1a7 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -1,9 +1,5 @@ import json -import uuid -import random -import time -import warnings -from datetime import datetime, timedelta, timezone +from datetime import datetime from opentelemetry import trace as otel_trace, context from opentelemetry.trace import ( @@ -19,22 +15,19 @@ import sentry_sdk from sentry_sdk.consts import SPANSTATUS, SPANDATA -from sentry_sdk.profiler.continuous_profiler import get_profiler_id from sentry_sdk.utils import ( _serialize_span_attribute, get_current_thread_meta, - is_valid_sample_rate, logger, ) from typing import TYPE_CHECKING, cast if TYPE_CHECKING: - from collections.abc import Callable, Mapping, MutableMapping + from collections.abc import Callable from typing import Any from typing import Dict from typing import Iterator - from typing import List from typing import Optional from typing import overload from typing import ParamSpec @@ -42,21 +35,19 @@ from typing import Union from typing import TypeVar - from typing_extensions import TypedDict, Unpack - - from opentelemetry.utils import types as OTelSpanAttributes + from typing_extensions import TypedDict P = ParamSpec("P") R = TypeVar("R") import sentry_sdk.profiler from sentry_sdk._types import ( - Event, MeasurementUnit, SamplingContext, - MeasurementValue, ) + from sentry_sdk.tracing_utils import Baggage + class SpanKwargs(TypedDict, total=False): trace_id: str """ @@ -91,7 +82,7 @@ class SpanKwargs(TypedDict, total=False): status: str """The span's status. Possible values are listed at https://develop.sentry.dev/sdk/event-payloads/span/""" - containing_transaction: Optional["Transaction"] + containing_transaction: Optional["Span"] """The transaction that this span belongs to.""" start_timestamp: Optional[Union[datetime, float]] @@ -208,914 +199,14 @@ def get_span_status_from_http_code(http_status_code): return SPANSTATUS.UNKNOWN_ERROR -class _SpanRecorder: - """Limits the number of spans recorded in a transaction.""" - - __slots__ = ("maxlen", "spans") - - def __init__(self, maxlen): - # type: (int) -> None - # FIXME: this is `maxlen - 1` only to preserve historical behavior - # enforced by tests. - # Either this should be changed to `maxlen` or the JS SDK implementation - # should be changed to match a consistent interpretation of what maxlen - # limits: either transaction+spans or only child spans. - self.maxlen = maxlen - 1 - self.spans = [] # type: List[Span] - - def add(self, span): - # type: (Span) -> None - if len(self.spans) > self.maxlen: - span._span_recorder = None - else: - self.spans.append(span) - - -class Span: - """A span holds timing information of a block of code. - Spans can have multiple child spans thus forming a span tree. - - :param trace_id: The trace ID of the root span. If this new span is to be the root span, - omit this parameter, and a new trace ID will be generated. - :param span_id: The span ID of this span. If omitted, a new span ID will be generated. - :param parent_span_id: The span ID of the parent span, if applicable. - :param same_process_as_parent: Whether this span is in the same process as the parent span. - :param sampled: Whether the span should be sampled. 
Overrides the default sampling decision - for this span when provided. - :param op: The span's operation. A list of recommended values is available here: - https://develop.sentry.dev/sdk/performance/span-operations/ - :param description: A description of what operation is being performed within the span. - - .. deprecated:: 2.15.0 - Please use the `name` parameter, instead. - :param name: A string describing what operation is being performed within the span. - :param hub: The hub to use for this span. - - .. deprecated:: 2.0.0 - Please use the `scope` parameter, instead. - :param status: The span's status. Possible values are listed at - https://develop.sentry.dev/sdk/event-payloads/span/ - :param containing_transaction: The transaction that this span belongs to. - :param start_timestamp: The timestamp when the span started. If omitted, the current time - will be used. - :param scope: The scope to use for this span. If not provided, we use the current scope. - """ - - __slots__ = ( - "trace_id", - "span_id", - "parent_span_id", - "same_process_as_parent", - "sampled", - "op", - "description", - "_measurements", - "start_timestamp", - "_start_timestamp_monotonic_ns", - "status", - "timestamp", - "_tags", - "_data", - "_span_recorder", - "_context_manager_state", - "_containing_transaction", - "scope", - "origin", - "name", - ) - - def __init__( - self, - trace_id=None, # type: Optional[str] - span_id=None, # type: Optional[str] - parent_span_id=None, # type: Optional[str] - same_process_as_parent=True, # type: bool - sampled=None, # type: Optional[bool] - op=None, # type: Optional[str] - description=None, # type: Optional[str] - status=None, # type: Optional[str] - containing_transaction=None, # type: Optional[Transaction] - start_timestamp=None, # type: Optional[Union[datetime, float]] - scope=None, # type: Optional[sentry_sdk.Scope] - origin=None, # type: Optional[str] - name=None, # type: Optional[str] - ): - # type: (...) 
-> None - self.trace_id = trace_id or uuid.uuid4().hex - self.span_id = span_id or uuid.uuid4().hex[16:] - self.parent_span_id = parent_span_id - self.same_process_as_parent = same_process_as_parent - self.sampled = sampled - self.op = op - self.description = name or description - self.status = status - self.scope = scope - self.origin = origin or DEFAULT_SPAN_ORIGIN - self._measurements = {} # type: Dict[str, MeasurementValue] - self._tags = {} # type: MutableMapping[str, str] - self._data = {} # type: Dict[str, Any] - self._containing_transaction = containing_transaction - - if start_timestamp is None: - start_timestamp = datetime.now(timezone.utc) - elif isinstance(start_timestamp, float): - start_timestamp = datetime.fromtimestamp(start_timestamp, timezone.utc) - self.start_timestamp = start_timestamp - try: - # profiling depends on this value and requires that - # it is measured in nanoseconds - self._start_timestamp_monotonic_ns = time.perf_counter_ns() - except AttributeError: - pass - - #: End timestamp of span - self.timestamp = None # type: Optional[datetime] - - self._span_recorder = None # type: Optional[_SpanRecorder] - - self.update_active_thread() - self.set_profiler_id(get_profiler_id()) - - # TODO this should really live on the Transaction class rather than the Span - # class - def init_span_recorder(self, maxlen): - # type: (int) -> None - if self._span_recorder is None: - self._span_recorder = _SpanRecorder(maxlen) - - def __repr__(self): - # type: () -> str - return ( - "<%s(op=%r, description:%r, trace_id=%r, span_id=%r, parent_span_id=%r, sampled=%r, origin=%r)>" - % ( - self.__class__.__name__, - self.op, - self.description, - self.trace_id, - self.span_id, - self.parent_span_id, - self.sampled, - self.origin, - ) - ) - - def __enter__(self): - # type: () -> Span - scope = self.scope or sentry_sdk.get_current_scope() - old_span = scope.span - scope.span = self - self._context_manager_state = (scope, old_span) - return self - - def __exit__(self, ty, value, tb): - # type: (Optional[Any], Optional[Any], Optional[Any]) -> None - if value is not None: - self.set_status(SPANSTATUS.INTERNAL_ERROR) - - scope, old_span = self._context_manager_state - del self._context_manager_state - self.finish(scope) - scope.span = old_span - - @property - def containing_transaction(self): - # type: () -> Optional[Transaction] - """The ``Transaction`` that this span belongs to. - The ``Transaction`` is the root of the span tree, - so one could also think of this ``Transaction`` as the "root span".""" - - # this is a getter rather than a regular attribute so that transactions - # can return `self` here instead (as a way to prevent them circularly - # referencing themselves) - return self._containing_transaction - - def start_child(self, **kwargs): - # type: (**Any) -> Span - """ - Start a sub-span from the current span or transaction. - - Takes the same arguments as the initializer of :py:class:`Span`. The - trace id, sampling decision, transaction pointer, and span recorder are - inherited from the current span/transaction. - - The instrumenter parameter is deprecated for user code, and it will - be removed in the next major version. Going forward, it should only - be used by the SDK itself. - """ - if kwargs.get("description") is not None: - warnings.warn( - "The `description` parameter is deprecated. 
Please use `name` instead.", - DeprecationWarning, - stacklevel=2, - ) - - kwargs.setdefault("sampled", self.sampled) - - child = Span( - trace_id=self.trace_id, - parent_span_id=self.span_id, - containing_transaction=self.containing_transaction, - **kwargs, - ) - - span_recorder = ( - self.containing_transaction and self.containing_transaction._span_recorder - ) - if span_recorder: - span_recorder.add(child) - - return child - - @classmethod - def continue_from_environ( - cls, - environ, # type: Mapping[str, str] - **kwargs, # type: Any - ): - # type: (...) -> Transaction - """ - Create a Transaction with the given params, then add in data pulled from - the ``sentry-trace`` and ``baggage`` headers from the environ (if any) - before returning the Transaction. - - This is different from :py:meth:`~sentry_sdk.tracing.Span.continue_from_headers` - in that it assumes header names in the form ``HTTP_HEADER_NAME`` - - such as you would get from a WSGI/ASGI environ - - rather than the form ``header-name``. - - :param environ: The ASGI/WSGI environ to pull information from. - """ - if cls is Span: - logger.warning( - "Deprecated: use Transaction.continue_from_environ " - "instead of Span.continue_from_environ." - ) - return Transaction.continue_from_headers(EnvironHeaders(environ), **kwargs) - - @classmethod - def continue_from_headers( - cls, - headers, # type: Mapping[str, str] - **kwargs, # type: Any - ): - # type: (...) -> Transaction - """ - Create a transaction with the given params (including any data pulled from - the ``sentry-trace`` and ``baggage`` headers). - - :param headers: The dictionary with the HTTP headers to pull information from. - """ - # TODO move this to the Transaction class - if cls is Span: - logger.warning( - "Deprecated: use Transaction.continue_from_headers " - "instead of Span.continue_from_headers." - ) - - # TODO-neel move away from this kwargs stuff, it's confusing and opaque - # make more explicit - baggage = Baggage.from_incoming_header(headers.get(BAGGAGE_HEADER_NAME)) - kwargs.update({BAGGAGE_HEADER_NAME: baggage}) - - sentrytrace_kwargs = extract_sentrytrace_data( - headers.get(SENTRY_TRACE_HEADER_NAME) - ) - - if sentrytrace_kwargs is not None: - kwargs.update(sentrytrace_kwargs) - - # If there's an incoming sentry-trace but no incoming baggage header, - # for instance in traces coming from older SDKs, - # baggage will be empty and immutable and won't be populated as head SDK. - baggage.freeze() - - transaction = Transaction(**kwargs) - transaction.same_process_as_parent = False - - return transaction - - def iter_headers(self): - # type: () -> Iterator[Tuple[str, str]] - """ - Creates a generator which returns the span's ``sentry-trace`` and ``baggage`` headers. - If the span's containing transaction doesn't yet have a ``baggage`` value, - this will cause one to be generated and stored. - """ - if not self.containing_transaction: - # Do not propagate headers if there is no containing transaction. Otherwise, this - # span ends up being the root span of a new trace, and since it does not get sent - # to Sentry, the trace will be missing a root transaction. The dynamic sampling - # context will also be missing, breaking dynamic sampling & traces. - return - - yield SENTRY_TRACE_HEADER_NAME, self.to_traceparent() - - baggage = self.containing_transaction.get_baggage().serialize() - if baggage: - yield BAGGAGE_HEADER_NAME, baggage - - @classmethod - def from_traceparent( - cls, - traceparent, # type: Optional[str] - **kwargs, # type: Any - ): - # type: (...) 
-> Optional[Transaction] - """ - DEPRECATED: Use :py:meth:`sentry_sdk.tracing.Span.continue_from_headers`. - - Create a ``Transaction`` with the given params, then add in data pulled from - the given ``sentry-trace`` header value before returning the ``Transaction``. - """ - logger.warning( - "Deprecated: Use Transaction.continue_from_headers(headers, **kwargs) " - "instead of from_traceparent(traceparent, **kwargs)" - ) - - if not traceparent: - return None - - return cls.continue_from_headers( - {SENTRY_TRACE_HEADER_NAME: traceparent}, **kwargs - ) - - def to_traceparent(self): - # type: () -> str - if self.sampled is True: - sampled = "1" - elif self.sampled is False: - sampled = "0" - else: - sampled = None - - traceparent = "%s-%s" % (self.trace_id, self.span_id) - if sampled is not None: - traceparent += "-%s" % (sampled,) - - return traceparent - - def to_baggage(self): - # type: () -> Optional[Baggage] - """Returns the :py:class:`~sentry_sdk.tracing_utils.Baggage` - associated with this ``Span``, if any. (Taken from the root of the span tree.) - """ - if self.containing_transaction: - return self.containing_transaction.get_baggage() - return None - - def set_tag(self, key, value): - # type: (str, Any) -> None - self._tags[key] = value - - def set_data(self, key, value): - # type: (str, Any) -> None - self._data[key] = value - - def set_status(self, value): - # type: (str) -> None - self.status = value - - def set_measurement(self, name, value, unit=""): - # type: (str, float, MeasurementUnit) -> None - self._measurements[name] = {"value": value, "unit": unit} - - def set_thread(self, thread_id, thread_name): - # type: (Optional[int], Optional[str]) -> None - - if thread_id is not None: - self.set_data(SPANDATA.THREAD_ID, str(thread_id)) - - if thread_name is not None: - self.set_data(SPANDATA.THREAD_NAME, thread_name) - - def set_profiler_id(self, profiler_id): - # type: (Optional[str]) -> None - if profiler_id is not None: - self.set_data(SPANDATA.PROFILER_ID, profiler_id) - - def set_http_status(self, http_status): - # type: (int) -> None - self.set_tag( - "http.status_code", str(http_status) - ) # we keep this for backwards compatibility - self.set_data(SPANDATA.HTTP_STATUS_CODE, http_status) - self.set_status(get_span_status_from_http_code(http_status)) - - def is_success(self): - # type: () -> bool - return self.status == "ok" - - def finish(self, scope=None, end_timestamp=None): - # type: (Optional[sentry_sdk.Scope], Optional[Union[float, datetime]]) -> Optional[str] - """ - Sets the end timestamp of the span. - - Additionally it also creates a breadcrumb from the span, - if the span represents a database or HTTP request. - - :param scope: The scope to use for this transaction. - If not provided, the current scope will be used. - :param end_timestamp: Optional timestamp that should - be used as timestamp instead of the current time. - - :return: Always ``None``. The type is ``Optional[str]`` to match - the return value of :py:meth:`sentry_sdk.tracing.Transaction.finish`. - """ - if self.timestamp is not None: - # This span is already finished, ignore. 
- return None - - try: - if end_timestamp: - if isinstance(end_timestamp, float): - end_timestamp = datetime.fromtimestamp(end_timestamp, timezone.utc) - self.timestamp = end_timestamp - else: - elapsed = time.perf_counter_ns() - self._start_timestamp_monotonic_ns - self.timestamp = self.start_timestamp + timedelta( - microseconds=elapsed / 1000 - ) - except AttributeError: - self.timestamp = datetime.now(timezone.utc) - - scope = scope or sentry_sdk.get_current_scope() - maybe_create_breadcrumbs_from_span(scope, self) - - return None - - def to_json(self): - # type: () -> Dict[str, Any] - """Returns a JSON-compatible representation of the span.""" - - rv = { - "trace_id": self.trace_id, - "span_id": self.span_id, - "parent_span_id": self.parent_span_id, - "same_process_as_parent": self.same_process_as_parent, - "op": self.op, - "description": self.description, - "start_timestamp": self.start_timestamp, - "timestamp": self.timestamp, - "origin": self.origin, - } # type: Dict[str, Any] - - if self.status: - self._tags["status"] = self.status - - if len(self._measurements) > 0: - rv["measurements"] = self._measurements - - tags = self._tags - if tags: - rv["tags"] = tags - - data = self._data - if data: - rv["data"] = data - - return rv - - def get_trace_context(self): - # type: () -> Any - rv = { - "trace_id": self.trace_id, - "span_id": self.span_id, - "parent_span_id": self.parent_span_id, - "op": self.op, - "description": self.description, - "origin": self.origin, - } # type: Dict[str, Any] - if self.status: - rv["status"] = self.status - - if self.containing_transaction: - rv["dynamic_sampling_context"] = ( - self.containing_transaction.get_baggage().dynamic_sampling_context() - ) - - data = {} - - thread_id = self._data.get(SPANDATA.THREAD_ID) - if thread_id is not None: - data["thread.id"] = thread_id - - thread_name = self._data.get(SPANDATA.THREAD_NAME) - if thread_name is not None: - data["thread.name"] = thread_name - - if data: - rv["data"] = data - - return rv - - def get_profile_context(self): - # type: () -> Optional[ProfileContext] - profiler_id = self._data.get(SPANDATA.PROFILER_ID) - if profiler_id is None: - return None - - return { - "profiler_id": profiler_id, - } - - def update_active_thread(self): - # type: () -> None - thread_id, thread_name = get_current_thread_meta() - self.set_thread(thread_id, thread_name) - - -class Transaction(Span): - """The Transaction is the root element that holds all the spans - for Sentry performance instrumentation. - - :param name: Identifier of the transaction. - Will show up in the Sentry UI. - :param parent_sampled: Whether the parent transaction was sampled. - If True this transaction will be kept, if False it will be discarded. - :param baggage: The W3C baggage header value. - (see https://www.w3.org/TR/baggage/) - :param source: A string describing the source of the transaction name. - This will be used to determine the transaction's type. - See https://develop.sentry.dev/sdk/event-payloads/transaction/#transaction-annotations - for more information. Default "custom". - :param kwargs: Additional arguments to be passed to the Span constructor. - See :py:class:`sentry_sdk.tracing.Span` for available arguments. 
- """ - - __slots__ = ( - "name", - "source", - "parent_sampled", - # used to create baggage value for head SDKs in dynamic sampling - "sample_rate", - "_measurements", - "_contexts", - "_profile", - "_baggage", - ) - - def __init__( # type: ignore[misc] - self, - name="", # type: str - parent_sampled=None, # type: Optional[bool] - baggage=None, # type: Optional[Baggage] - source=TRANSACTION_SOURCE_CUSTOM, # type: str - **kwargs, # type: Unpack[SpanKwargs] - ): - # type: (...) -> None - - super().__init__(**kwargs) - - self.name = name - self.source = source - self.sample_rate = None # type: Optional[float] - self.parent_sampled = parent_sampled - self._measurements = {} # type: Dict[str, MeasurementValue] - self._contexts = {} # type: Dict[str, Any] - self._profile = ( - None - ) # type: Optional[sentry_sdk.profiler.transaction_profiler.Profile] - self._baggage = baggage - - def __repr__(self): - # type: () -> str - return ( - "<%s(name=%r, op=%r, trace_id=%r, span_id=%r, parent_span_id=%r, sampled=%r, source=%r, origin=%r)>" - % ( - self.__class__.__name__, - self.name, - self.op, - self.trace_id, - self.span_id, - self.parent_span_id, - self.sampled, - self.source, - self.origin, - ) - ) - - def _possibly_started(self): - # type: () -> bool - """Returns whether the transaction might have been started. - - If this returns False, we know that the transaction was not started - with sentry_sdk.start_transaction, and therefore the transaction will - be discarded. - """ - - # We must explicitly check self.sampled is False since self.sampled can be None - return self._span_recorder is not None or self.sampled is False - - def __enter__(self): - # type: () -> Transaction - if not self._possibly_started(): - logger.debug( - "Transaction was entered without being started with sentry_sdk.start_transaction." - "The transaction will not be sent to Sentry. To fix, start the transaction by" - "passing it to sentry_sdk.start_transaction." - ) - - super().__enter__() - - if self._profile is not None: - self._profile.__enter__() - - return self - - def __exit__(self, ty, value, tb): - # type: (Optional[Any], Optional[Any], Optional[Any]) -> None - if self._profile is not None: - self._profile.__exit__(ty, value, tb) - - super().__exit__(ty, value, tb) - - @property - def containing_transaction(self): - # type: () -> Transaction - """The root element of the span tree. - In the case of a transaction it is the transaction itself. - """ - - # Transactions (as spans) belong to themselves (as transactions). This - # is a getter rather than a regular attribute to avoid having a circular - # reference. - return self - - def finish( - self, - scope=None, # type: Optional[sentry_sdk.Scope] - end_timestamp=None, # type: Optional[Union[float, datetime]] - ): - # type: (...) -> Optional[str] - """Finishes the transaction and sends it to Sentry. - All finished spans in the transaction will also be sent to Sentry. - - :param scope: The Scope to use for this transaction. - If not provided, the current Scope will be used. - :param end_timestamp: Optional timestamp that should - be used as timestamp instead of the current time. - - :return: The event ID if the transaction was sent to Sentry, - otherwise None. - """ - if self.timestamp is not None: - # This transaction is already finished, ignore. - return None - - scope = scope or self.scope or sentry_sdk.get_current_scope() - client = sentry_sdk.get_client() - - if not client.is_active(): - # We have no active client and therefore nowhere to send this transaction. 
- return None - - if self._span_recorder is None: - # Explicit check against False needed because self.sampled might be None - if self.sampled is False: - logger.debug("Discarding transaction because sampled = False") - else: - logger.debug( - "Discarding transaction because it was not started with sentry_sdk.start_transaction" - ) - - # This is not entirely accurate because discards here are not - # exclusively based on sample rate but also traces sampler, but - # we handle this the same here. - if client.transport and has_tracing_enabled(client.options): - if client.monitor and client.monitor.downsample_factor > 0: - reason = "backpressure" - else: - reason = "sample_rate" - - client.transport.record_lost_event(reason, data_category="transaction") - - # Only one span (the transaction itself) is discarded, since we did not record any spans here. - client.transport.record_lost_event(reason, data_category="span") - return None - - if not self.name: - logger.warning( - "Transaction has no name, falling back to ``." - ) - self.name = "" - - super().finish(scope, end_timestamp) - - if not self.sampled: - # At this point a `sampled = None` should have already been resolved - # to a concrete decision. - if self.sampled is None: - logger.warning("Discarding transaction without sampling decision.") - - return None - - finished_spans = [ - span.to_json() - for span in self._span_recorder.spans - if span.timestamp is not None - ] - - # we do this to break the circular reference of transaction -> span - # recorder -> span -> containing transaction (which is where we started) - # before either the spans or the transaction goes out of scope and has - # to be garbage collected - self._span_recorder = None - - contexts = {} - contexts.update(self._contexts) - contexts.update({"trace": self.get_trace_context()}) - profile_context = self.get_profile_context() - if profile_context is not None: - contexts.update({"profile": profile_context}) - - event = { - "type": "transaction", - "transaction": self.name, - "transaction_info": {"source": self.source}, - "contexts": contexts, - "tags": self._tags, - "timestamp": self.timestamp, - "start_timestamp": self.start_timestamp, - "spans": finished_spans, - } # type: Event - - if self._profile is not None and self._profile.valid(): - event["profile"] = self._profile - self._profile = None - - event["measurements"] = self._measurements - - return scope.capture_event(event) - - def set_measurement(self, name, value, unit=""): - # type: (str, float, MeasurementUnit) -> None - self._measurements[name] = {"value": value, "unit": unit} - - def set_context(self, key, value): - # type: (str, Any) -> None - """Sets a context. Transactions can have multiple contexts - and they should follow the format described in the "Contexts Interface" - documentation. - - :param key: The name of the context. - :param value: The information about the context. - """ - self._contexts[key] = value - - def set_http_status(self, http_status): - # type: (int) -> None - """Sets the status of the Transaction according to the given HTTP status. 
- - :param http_status: The HTTP status code.""" - super().set_http_status(http_status) - self.set_context("response", {"status_code": http_status}) - - def to_json(self): - # type: () -> Dict[str, Any] - """Returns a JSON-compatible representation of the transaction.""" - rv = super().to_json() - - rv["name"] = self.name - rv["source"] = self.source - rv["sampled"] = self.sampled - - return rv - - def get_trace_context(self): - # type: () -> Any - trace_context = super().get_trace_context() - - if self._data: - trace_context["data"] = self._data - - return trace_context - - def get_baggage(self): - # type: () -> Baggage - """Returns the :py:class:`~sentry_sdk.tracing_utils.Baggage` - associated with the Transaction. - - The first time a new baggage with Sentry items is made, - it will be frozen.""" - - if not self._baggage or self._baggage.mutable: - self._baggage = Baggage.populate_from_transaction(self) - - return self._baggage - - def _set_initial_sampling_decision(self, sampling_context): - # type: (SamplingContext) -> None - """ - Sets the transaction's sampling decision, according to the following - precedence rules: - - 1. If a sampling decision is passed to `start_transaction` - (`start_transaction(name: "my transaction", sampled: True)`), that - decision will be used, regardless of anything else - - 2. If `traces_sampler` is defined, its decision will be used. It can - choose to keep or ignore any parent sampling decision, or use the - sampling context data to make its own decision or to choose a sample - rate for the transaction. - - 3. If `traces_sampler` is not defined, but there's a parent sampling - decision, the parent sampling decision will be used. - - 4. If `traces_sampler` is not defined and there's no parent sampling - decision, `traces_sample_rate` will be used. - """ - client = sentry_sdk.get_client() - - transaction_description = "{op}transaction <{name}>".format( - op=("<" + self.op + "> " if self.op else ""), name=self.name - ) - - # nothing to do if tracing is disabled - if not has_tracing_enabled(client.options): - self.sampled = False - return - - # if the user has forced a sampling decision by passing a `sampled` - # value when starting the transaction, go with that - if self.sampled is not None: - self.sample_rate = float(self.sampled) - return - - # we would have bailed already if neither `traces_sampler` nor - # `traces_sample_rate` were defined, so one of these should work; prefer - # the hook if so - sample_rate = ( - client.options["traces_sampler"](sampling_context) - if callable(client.options.get("traces_sampler")) - else ( - # default inheritance behavior - sampling_context["parent_sampled"] - if sampling_context["parent_sampled"] is not None - else client.options["traces_sample_rate"] - ) - ) - - # Since this is coming from the user (or from a function provided by the - # user), who knows what we might get. (The only valid values are - # booleans or numbers between 0 and 1.) 
- if not is_valid_sample_rate(sample_rate, source="Tracing"): - logger.warning( - "[Tracing] Discarding {transaction_description} because of invalid sample rate.".format( - transaction_description=transaction_description, - ) - ) - self.sampled = False - return - - self.sample_rate = float(sample_rate) - - if client.monitor: - self.sample_rate /= 2**client.monitor.downsample_factor - - # if the function returned 0 (or false), or if `traces_sample_rate` is - # 0, it's a sign the transaction should be dropped - if not self.sample_rate: - logger.debug( - "[Tracing] Discarding {transaction_description} because {reason}".format( - transaction_description=transaction_description, - reason=( - "traces_sampler returned 0 or False" - if callable(client.options.get("traces_sampler")) - else "traces_sample_rate is set to 0" - ), - ) - ) - self.sampled = False - return - - # Now we roll the dice. random.random is inclusive of 0, but not of 1, - # so strict < is safe here. In case sample_rate is a boolean, cast it - # to a float (True becomes 1.0 and False becomes 0.0) - self.sampled = random.random() < self.sample_rate - - if self.sampled: - logger.debug( - "[Tracing] Starting {transaction_description}".format( - transaction_description=transaction_description, - ) - ) - else: - logger.debug( - "[Tracing] Discarding {transaction_description} because it's not included in the random sample (sampling rate = {sample_rate})".format( - transaction_description=transaction_description, - sample_rate=self.sample_rate, - ) - ) - - -class NoOpSpan(Span): +class NoOpSpan: def __repr__(self): # type: () -> str return "<%s>" % self.__class__.__name__ @property def containing_transaction(self): - # type: () -> Optional[Transaction] + # type: () -> Optional[Span] return None def start_child(self, **kwargs): @@ -1195,7 +286,7 @@ def _set_initial_sampling_decision(self, sampling_context): pass -class POTelSpan: +class Span: """ OTel span wrapper providing compatibility with the old span interface. """ @@ -1211,9 +302,9 @@ def __init__( origin=None, # type: Optional[str] name=None, # type: Optional[str] source=TRANSACTION_SOURCE_CUSTOM, # type: str - attributes=None, # type: OTelSpanAttributes + attributes=None, # type: Optional[dict[str, Any]] only_if_parent=False, # type: bool - parent_span=None, # type: Optional[POTelSpan] + parent_span=None, # type: Optional[Span] otel_span=None, # type: Optional[OtelSpan] **_, # type: dict[str, object] ): @@ -1284,7 +375,7 @@ def __init__( self.set_status(status) def __eq__(self, other): - # type: (POTelSpan) -> bool + # type: (Span) -> bool return self._otel_span == other._otel_span def __repr__(self): @@ -1304,7 +395,7 @@ def __repr__(self): ) def __enter__(self): - # type: () -> POTelSpan + # type: () -> Span # XXX use_span? https://github.com/open-telemetry/opentelemetry-python/blob/3836da8543ce9751051e38a110c0468724042e62/opentelemetry-api/src/opentelemetry/trace/__init__.py#L547 # # create a Context object with parent set as current span @@ -1364,7 +455,7 @@ def origin(self, value): @property def containing_transaction(self): - # type: () -> Optional[POTelSpan] + # type: () -> Optional[Span] """ Get the transaction this span is a child of. 
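For orientation, the wrapper class renamed in the hunk above (POTelSpan becoming Span) is still driven through the public API rather than constructed directly. A minimal usage sketch, assuming the `sentry_sdk.start_span` entry point and the `root_span` / `is_root_span` / `set_attribute` members that appear in this diff; the DSN, span names and attribute values are placeholders:

    import sentry_sdk

    sentry_sdk.init(dsn="...", traces_sample_rate=1.0)  # placeholder DSN

    # The first span started without a parent becomes the root span (the old
    # "transaction"); spans started inside it become its children.
    with sentry_sdk.start_span(op="http.server", name="GET /index") as root:
        with sentry_sdk.start_span(op="db.query", name="fetch user") as child:
            child.set_attribute("db.system", "postgresql")
            assert root.is_root_span
            assert not child.is_root_span
            assert child.root_span is not None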
@@ -1378,7 +469,7 @@ def containing_transaction(self): @property def root_span(self): - # type: () -> Optional[POTelSpan] + # type: () -> Optional[Span] from sentry_sdk.integrations.opentelemetry.utils import ( get_sentry_meta, ) @@ -1386,7 +477,7 @@ def root_span(self): root_otel_span = cast( "Optional[OtelSpan]", get_sentry_meta(self._otel_span, "root_span") ) - return POTelSpan(otel_span=root_otel_span) if root_otel_span else None + return Span(otel_span=root_otel_span) if root_otel_span else None @property def is_root_span(self): @@ -1515,8 +606,8 @@ def timestamp(self): return convert_from_otel_timestamp(end_time) def start_child(self, **kwargs): - # type: (**Any) -> POTelSpan - return POTelSpan(sampled=self.sampled, parent_span=self, **kwargs) + # type: (**Any) -> Span + return Span(sampled=self.sampled, parent_span=self, **kwargs) def iter_headers(self): # type: () -> Iterator[Tuple[str, str]] @@ -1691,6 +782,10 @@ def set_context(self, key, value): self.set_attribute(f"{SentrySpanAttribute.CONTEXT}.{key}", value) +# TODO-neel-potel add deprecation +Transaction = Span + + if TYPE_CHECKING: @overload @@ -1731,14 +826,3 @@ async def my_async_function(): return start_child_span_decorator(func) else: return start_child_span_decorator - - -# Circular imports - -from sentry_sdk.tracing_utils import ( - Baggage, - EnvironHeaders, - extract_sentrytrace_data, - has_tracing_enabled, - maybe_create_breadcrumbs_from_span, -) diff --git a/sentry_sdk/tracing_utils.py b/sentry_sdk/tracing_utils.py index e217994839..6ebe7e0322 100644 --- a/sentry_sdk/tracing_utils.py +++ b/sentry_sdk/tracing_utils.py @@ -154,12 +154,6 @@ def record_sql_queries( yield span -def maybe_create_breadcrumbs_from_span(scope, span): - # type: (sentry_sdk.Scope, sentry_sdk.tracing.Span) -> None - # TODO: can be removed when POtelSpan replaces Span - pass - - def _get_frame_module_abs_path(frame): # type: (FrameType) -> Optional[str] try: diff --git a/tests/integrations/asyncpg/test_asyncpg.py b/tests/integrations/asyncpg/test_asyncpg.py index 4604557a4a..579052da27 100644 --- a/tests/integrations/asyncpg/test_asyncpg.py +++ b/tests/integrations/asyncpg/test_asyncpg.py @@ -676,7 +676,7 @@ def fake_record_sql_queries(*args, **kwargs): yield span with mock.patch( - "sentry_sdk.tracing.POTelSpan.start_timestamp", + "sentry_sdk.tracing.Span.start_timestamp", datetime.datetime(2024, 1, 1, microsecond=0, tzinfo=datetime.timezone.utc), ): with mock.patch( @@ -723,7 +723,7 @@ def fake_record_sql_queries(*args, **kwargs): yield span with mock.patch( - "sentry_sdk.tracing.POTelSpan.start_timestamp", + "sentry_sdk.tracing.Span.start_timestamp", datetime.datetime(2024, 1, 1, microsecond=0, tzinfo=datetime.timezone.utc), ): with mock.patch( diff --git a/tests/integrations/httpx/test_httpx.py b/tests/integrations/httpx/test_httpx.py index a8a509152f..12a0038f6b 100644 --- a/tests/integrations/httpx/test_httpx.py +++ b/tests/integrations/httpx/test_httpx.py @@ -3,7 +3,6 @@ import httpx import pytest -import responses import sentry_sdk from sentry_sdk import capture_message, start_span @@ -62,7 +61,9 @@ def before_breadcrumb(crumb, hint): "httpx_client", (httpx.Client(), httpx.AsyncClient()), ) -def test_outgoing_trace_headers(sentry_init, httpx_client, capture_envelopes, httpx_mock): +def test_outgoing_trace_headers( + sentry_init, httpx_client, capture_envelopes, httpx_mock +): httpx_mock.add_response() sentry_init( diff --git a/tests/integrations/opentelemetry/test_compat.py b/tests/integrations/opentelemetry/test_compat.py index 
f2292d9ff2..1ae73494cd 100644 --- a/tests/integrations/opentelemetry/test_compat.py +++ b/tests/integrations/opentelemetry/test_compat.py @@ -19,7 +19,7 @@ def test_transaction_name_span_description_compat( ) as spn: ... - assert trx.__class__.__name__ == "POTelSpan" + assert trx.__class__.__name__ == "Span" assert trx.op == "trx-op" assert trx.name == "trx-name" assert trx.description is None @@ -30,7 +30,7 @@ def test_transaction_name_span_description_compat( assert trx._otel_span.attributes["sentry.name"] == "trx-name" assert "sentry.description" not in trx._otel_span.attributes - assert spn.__class__.__name__ == "POTelSpan" + assert spn.__class__.__name__ == "Span" assert spn.op == "span-op" assert spn.description == "span-desc" assert spn.name == "span-desc" diff --git a/tests/tracing/test_http_headers.py b/tests/tracing/test_http_headers.py deleted file mode 100644 index 6a8467101e..0000000000 --- a/tests/tracing/test_http_headers.py +++ /dev/null @@ -1,56 +0,0 @@ -from unittest import mock - -import pytest - -from sentry_sdk.tracing import Transaction -from sentry_sdk.tracing_utils import extract_sentrytrace_data - - -@pytest.mark.parametrize("sampled", [True, False, None]) -def test_to_traceparent(sampled): - transaction = Transaction( - name="/interactions/other-dogs/new-dog", - op="greeting.sniff", - trace_id="12312012123120121231201212312012", - sampled=sampled, - ) - - traceparent = transaction.to_traceparent() - - parts = traceparent.split("-") - assert parts[0] == "12312012123120121231201212312012" # trace_id - assert parts[1] == transaction.span_id # parent_span_id - if sampled is None: - assert len(parts) == 2 - else: - assert parts[2] == "1" if sampled is True else "0" # sampled - - -@pytest.mark.parametrize("sampling_decision", [True, False]) -def test_sentrytrace_extraction(sampling_decision): - sentrytrace_header = "12312012123120121231201212312012-0415201309082013-{}".format( - 1 if sampling_decision is True else 0 - ) - assert extract_sentrytrace_data(sentrytrace_header) == { - "trace_id": "12312012123120121231201212312012", - "parent_span_id": "0415201309082013", - "parent_sampled": sampling_decision, - } - - -def test_iter_headers(monkeypatch): - monkeypatch.setattr( - Transaction, - "to_traceparent", - mock.Mock(return_value="12312012123120121231201212312012-0415201309082013-0"), - ) - - transaction = Transaction( - name="/interactions/other-dogs/new-dog", - op="greeting.sniff", - ) - - headers = dict(transaction.iter_headers()) - assert ( - headers["sentry-trace"] == "12312012123120121231201212312012-0415201309082013-0" - ) From 3c7bb1e9e97dabdf0add56fa47caf919118c2a08 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Tue, 21 Jan 2025 12:34:38 +0100 Subject: [PATCH 173/244] Remove nullcontext from wsgi and asgi (#3983) mypy was complaining too so just removed it --- sentry_sdk/integrations/asgi.py | 93 +++++++++++++++------------------ sentry_sdk/integrations/wsgi.py | 46 ++++++++-------- 2 files changed, 68 insertions(+), 71 deletions(-) diff --git a/sentry_sdk/integrations/asgi.py b/sentry_sdk/integrations/asgi.py index 4a3fe830eb..2a8bbe5091 100644 --- a/sentry_sdk/integrations/asgi.py +++ b/sentry_sdk/integrations/asgi.py @@ -6,7 +6,6 @@ import asyncio import inspect -from contextlib import nullcontext from copy import deepcopy from functools import partial @@ -169,20 +168,24 @@ async def _run_asgi3(self, scope, receive, send): # type: (Any, Any, Any) -> Any return await self._run_app(scope, receive, send, asgi_version=3) + async def _run_original_app(self, 
scope, receive, send, asgi_version): + # type: (Any, Any, Any, Any, int) -> Any + try: + if asgi_version == 2: + return await self.app(scope)(receive, send) + else: + return await self.app(scope, receive, send) + + except Exception as exc: + _capture_exception(exc, mechanism_type=self.mechanism_type) + raise exc from None + async def _run_app(self, scope, receive, send, asgi_version): # type: (Any, Any, Any, Any, int) -> Any is_recursive_asgi_middleware = _asgi_middleware_applied.get(False) is_lifespan = scope["type"] == "lifespan" if is_recursive_asgi_middleware or is_lifespan: - try: - if asgi_version == 2: - return await self.app(scope)(receive, send) - else: - return await self.app(scope, receive, send) - - except Exception as exc: - _capture_exception(exc, mechanism_type=self.mechanism_type) - raise exc from None + return await self._run_original_app(scope, receive, send, asgi_version) _asgi_middleware_applied.set(True) try: @@ -209,52 +212,42 @@ async def _run_app(self, scope, receive, send, asgi_version): method = scope.get("method", "").upper() should_trace = method in self.http_methods_to_capture + if not should_trace: + return await self._run_original_app( + scope, receive, send, asgi_version + ) + with sentry_sdk.continue_trace(_get_headers(scope)): - with ( - sentry_sdk.start_span( - op=( - OP.WEBSOCKET_SERVER - if ty == "websocket" - else OP.HTTP_SERVER - ), - name=transaction_name, - source=transaction_source, - origin=self.span_origin, - attributes=_prepopulate_attributes(scope), - ) - if should_trace - else nullcontext() + with sentry_sdk.start_span( + op=( + OP.WEBSOCKET_SERVER + if ty == "websocket" + else OP.HTTP_SERVER + ), + name=transaction_name, + source=transaction_source, + origin=self.span_origin, + attributes=_prepopulate_attributes(scope), ) as span: if span is not None: logger.debug("[ASGI] Started transaction: %s", span) span.set_tag("asgi.type", ty) - try: - - async def _sentry_wrapped_send(event): - # type: (Dict[str, Any]) -> Any - is_http_response = ( - event.get("type") == "http.response.start" - and span is not None - and "status" in event - ) - if is_http_response: - span.set_http_status(event["status"]) - - return await send(event) - - if asgi_version == 2: - return await self.app(scope)( - receive, _sentry_wrapped_send - ) - else: - return await self.app( - scope, receive, _sentry_wrapped_send - ) - except Exception as exc: - _capture_exception( - exc, mechanism_type=self.mechanism_type + + async def _sentry_wrapped_send(event): + # type: (Dict[str, Any]) -> Any + is_http_response = ( + event.get("type") == "http.response.start" + and span is not None + and "status" in event ) - raise exc from None + if is_http_response: + span.set_http_status(event["status"]) + + return await send(event) + + return await self._run_original_app( + scope, receive, _sentry_wrapped_send, asgi_version + ) finally: _asgi_middleware_applied.set(False) diff --git a/sentry_sdk/integrations/wsgi.py b/sentry_sdk/integrations/wsgi.py index e9cc65d716..74051df0db 100644 --- a/sentry_sdk/integrations/wsgi.py +++ b/sentry_sdk/integrations/wsgi.py @@ -1,5 +1,4 @@ import sys -from contextlib import nullcontext from functools import partial import sentry_sdk @@ -123,9 +122,9 @@ def __call__(self, environ, start_response): ) method = environ.get("REQUEST_METHOD", "").upper() should_trace = method in self.http_methods_to_capture - with sentry_sdk.continue_trace(environ): - with ( - sentry_sdk.start_span( + if should_trace: + with sentry_sdk.continue_trace(environ): + with 
sentry_sdk.start_span( op=OP.HTTP_SERVER, name=DEFAULT_TRANSACTION_NAME, source=TRANSACTION_SOURCE_ROUTE, @@ -133,31 +132,36 @@ def __call__(self, environ, start_response): attributes=_prepopulate_attributes( environ, self.use_x_forwarded_for ), - ) - if should_trace - else nullcontext() - ) as transaction: - try: - response = self.app( - environ, - partial( - _sentry_start_response, - start_response, - transaction, - ), + ) as span: + response = self._run_original_app( + environ, start_response, span ) - except BaseException: - reraise(*_capture_exception()) + else: + response = self._run_original_app(environ, start_response, None) finally: _wsgi_middleware_applied.set(False) return _ScopedResponse(scope, response) + def _run_original_app(self, environ, start_response, span): + # type: (dict[str, str], StartResponse, Optional[Span]) -> Any + try: + return self.app( + environ, + partial( + _sentry_start_response, + start_response, + span, + ), + ) + except BaseException: + reraise(*_capture_exception()) + def _sentry_start_response( # type: ignore old_start_response, # type: StartResponse - transaction, # type: Optional[Span] + span, # type: Optional[Span] status, # type: str response_headers, # type: WsgiResponseHeaders exc_info=None, # type: Optional[WsgiExcInfo] @@ -165,8 +169,8 @@ def _sentry_start_response( # type: ignore # type: (...) -> WsgiResponseIter with capture_internal_exceptions(): status_int = int(status.split(" ", 1)[0]) - if transaction is not None: - transaction.set_http_status(status_int) + if span is not None: + span.set_http_status(status_int) if exc_info is None: # The Django Rest Framework WSGI test client, and likely other From 37d85997734fcf3b1194aa685f7757492a2d1db0 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Tue, 21 Jan 2025 15:02:12 +0100 Subject: [PATCH 174/244] Fix non scope related types (#3970) part of #3929 --- MIGRATION_GUIDE.md | 2 + sentry_sdk/client.py | 10 ++-- sentry_sdk/integrations/asyncpg.py | 4 +- sentry_sdk/integrations/clickhouse_driver.py | 21 +++++---- .../integrations/opentelemetry/integration.py | 4 +- .../integrations/opentelemetry/sampler.py | 24 ++++++---- .../integrations/opentelemetry/utils.py | 8 +++- sentry_sdk/tracing.py | 27 +++++++---- sentry_sdk/tracing_utils.py | 47 ------------------- sentry_sdk/transport.py | 3 +- 10 files changed, 64 insertions(+), 86 deletions(-) diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index d78abe14c5..7a1275b852 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -14,6 +14,7 @@ Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of wh - You can no longer change the sampled status of a span with `span.sampled = False` after starting it. - The `Span()` constructor does not accept a `hub` parameter anymore. - `Span.finish()` does not accept a `hub` parameter anymore. +- `Span.finish()` no longer returns the `event_id` if the event is sent to sentry. - The `Profile()` constructor does not accept a `hub` parameter anymore. - A `Profile` object does not have a `.hub` property anymore. - `sentry_sdk.continue_trace` no longer returns a `Transaction` and is now a context manager. @@ -146,6 +147,7 @@ Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of wh - `continue_from_headers`, `continue_from_environ` and `from_traceparent` have been removed, please use top-level API `sentry_sdk.continue_trace` instead. - `PropagationContext` constructor no longer takes a `dynamic_sampling_context` but takes a `baggage` object instead. 
- `ThreadingIntegration` no longer takes the `propagate_hub` argument. +- `Baggage.populate_from_transaction` has been removed. ### Deprecated diff --git a/sentry_sdk/client.py b/sentry_sdk/client.py index 796da2614e..85623d8056 100644 --- a/sentry_sdk/client.py +++ b/sentry_sdk/client.py @@ -449,7 +449,7 @@ def _prepare_event( ) return None - event = event_ + event = event_ # type: Optional[Event] # type: ignore[no-redef] spans_delta = spans_before - len(event.get("spans", [])) if is_transaction and spans_delta > 0 and self.transport is not None: @@ -483,7 +483,7 @@ def _prepare_event( for key in "release", "environment", "server_name", "dist": if event.get(key) is None and self.options[key] is not None: - event[key] = str(self.options[key]).strip() + event[key] = str(self.options[key]).strip() # type: ignore[literal-required] if event.get("sdk") is None: sdk_info = dict(SDK_INFO) sdk_info["integrations"] = sorted(self.integrations.keys()) @@ -523,7 +523,7 @@ def _prepare_event( and event is not None and event.get("type") != "transaction" ): - new_event = None + new_event = None # type: Optional[Event] with capture_internal_exceptions(): new_event = before_send(event, hint or {}) if new_event is None: @@ -532,7 +532,7 @@ def _prepare_event( self.transport.record_lost_event( "before_send", data_category="error" ) - event = new_event + event = new_event # type: Optional[Event] # type: ignore[no-redef] before_send_transaction = self.options["before_send_transaction"] if ( @@ -562,7 +562,7 @@ def _prepare_event( reason="before_send", data_category="span", quantity=spans_delta ) - event = new_event + event = new_event # type: Optional[Event] # type: ignore[no-redef] return event diff --git a/sentry_sdk/integrations/asyncpg.py b/sentry_sdk/integrations/asyncpg.py index 35726658ed..65f4d30e0d 100644 --- a/sentry_sdk/integrations/asyncpg.py +++ b/sentry_sdk/integrations/asyncpg.py @@ -189,7 +189,7 @@ async def _inner(*args: Any, **kwargs: Any) -> T: def _get_db_data( conn: Any = None, - addr: Optional[tuple[str]] = None, + addr: Optional[tuple[str, ...]] = None, database: Optional[str] = None, user: Optional[str] = None, ) -> dict[str, str]: @@ -218,6 +218,6 @@ def _get_db_data( return data -def _set_on_span(span: Span, data: dict[str, Any]): +def _set_on_span(span: Span, data: dict[str, Any]) -> None: for key, value in data.items(): span.set_attribute(key, value) diff --git a/sentry_sdk/integrations/clickhouse_driver.py b/sentry_sdk/integrations/clickhouse_driver.py index d43603ba2f..7c908b7d6d 100644 --- a/sentry_sdk/integrations/clickhouse_driver.py +++ b/sentry_sdk/integrations/clickhouse_driver.py @@ -9,7 +9,7 @@ ensure_integration_enabled, ) -from typing import TYPE_CHECKING, Any, Dict, TypeVar +from typing import TYPE_CHECKING, cast, Any, Dict, TypeVar # Hack to get new Python features working in older versions # without introducing a hard dependency on `typing_extensions` @@ -94,6 +94,7 @@ def _inner(*args: P.args, **kwargs: P.kwargs) -> T: connection._sentry_span = span # type: ignore[attr-defined] data = _get_db_data(connection) + data = cast("dict[str, Any]", data) data["db.query.text"] = query if query_id: @@ -116,9 +117,10 @@ def _inner(*args: P.args, **kwargs: P.kwargs) -> T: def _wrap_end(f: Callable[P, T]) -> Callable[P, T]: def _inner_end(*args: P.args, **kwargs: P.kwargs) -> T: res = f(*args, **kwargs) - connection = args[0].connection + client = cast("clickhouse_driver.client.Client", args[0]) + connection = client.connection - span = getattr(connection, "_sentry_span", None) # 
type: ignore[attr-defined] + span = getattr(connection, "_sentry_span", None) if span is not None: data = getattr(connection, "_sentry_db_data", {}) @@ -148,8 +150,9 @@ def _inner_end(*args: P.args, **kwargs: P.kwargs) -> T: def _wrap_send_data(f: Callable[P, T]) -> Callable[P, T]: def _inner_send_data(*args: P.args, **kwargs: P.kwargs) -> T: - connection = args[0].connection - db_params_data = args[2] + client = cast("clickhouse_driver.client.Client", args[0]) + connection = client.connection + db_params_data = cast("list[Any]", args[2]) span = getattr(connection, "_sentry_span", None) if span is not None: @@ -157,8 +160,10 @@ def _inner_send_data(*args: P.args, **kwargs: P.kwargs) -> T: _set_on_span(span, data) if should_send_default_pii(): - saved_db_data = getattr(connection, "_sentry_db_data", {}) - db_params = saved_db_data.get("db.params") or [] + saved_db_data = getattr( + connection, "_sentry_db_data", {} + ) # type: dict[str, Any] + db_params = saved_db_data.get("db.params") or [] # type: list[Any] db_params.extend(db_params_data) saved_db_data["db.params"] = db_params span.set_attribute("db.params", _serialize_span_attribute(db_params)) @@ -178,6 +183,6 @@ def _get_db_data(connection: clickhouse_driver.connection.Connection) -> Dict[st } -def _set_on_span(span: Span, data: Dict[str, Any]): +def _set_on_span(span: Span, data: Dict[str, Any]) -> None: for key, value in data.items(): span.set_attribute(key, _serialize_span_attribute(value)) diff --git a/sentry_sdk/integrations/opentelemetry/integration.py b/sentry_sdk/integrations/opentelemetry/integration.py index 551ef48891..1124e736ed 100644 --- a/sentry_sdk/integrations/opentelemetry/integration.py +++ b/sentry_sdk/integrations/opentelemetry/integration.py @@ -59,10 +59,10 @@ def _patch_readable_span(): def sentry_patched_readable_span(self): # type: (Span) -> ReadableSpan readable_span = old_readable_span(self) - readable_span._sentry_meta = getattr(self, "_sentry_meta", {}) + readable_span._sentry_meta = getattr(self, "_sentry_meta", {}) # type: ignore[attr-defined] return readable_span - Span._readable_span = sentry_patched_readable_span + Span._readable_span = sentry_patched_readable_span # type: ignore[method-assign] def _setup_sentry_tracing(): diff --git a/sentry_sdk/integrations/opentelemetry/sampler.py b/sentry_sdk/integrations/opentelemetry/sampler.py index 8d886add09..0b7004dc34 100644 --- a/sentry_sdk/integrations/opentelemetry/sampler.py +++ b/sentry_sdk/integrations/opentelemetry/sampler.py @@ -132,10 +132,15 @@ def should_sample( # parent_span_context.is_valid means this span has a parent, remote or local is_root_span = not parent_span_context.is_valid or parent_span_context.is_remote + sample_rate = None + # Explicit sampled value provided at start_span - if attributes.get(SentrySpanAttribute.CUSTOM_SAMPLED) is not None: + custom_sampled = cast( + "Optional[bool]", attributes.get(SentrySpanAttribute.CUSTOM_SAMPLED) + ) + if custom_sampled is not None: if is_root_span: - sample_rate = float(attributes[SentrySpanAttribute.CUSTOM_SAMPLED]) + sample_rate = float(custom_sampled) if sample_rate > 0: return sampled_result(parent_span_context, attributes, sample_rate) else: @@ -145,8 +150,6 @@ def should_sample( f"[Tracing] Ignoring sampled param for non-root span {name}" ) - sample_rate = None - # Check if there is a traces_sampler # Traces_sampler is responsible to check parent sampled to have full transactions. 
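# A hedged illustration of the user-facing side of this logic (not taken from
# the patch itself): the sampling_context assembled below is what a configured
# traces_sampler callable receives, and its return value is used as the sample
# rate. Names and rates here are made up:
#
#     def my_traces_sampler(sampling_context):
#         parent_sampled = sampling_context.get("parent_sampled")
#         if parent_sampled is not None:
#             # defer to the incoming trace's decision
#             return float(parent_sampled)
#         if sampling_context["transaction_context"]["op"] == "http.server":
#             return 0.5
#         return 0.0
#
#     sentry_sdk.init(dsn="...", traces_sampler=my_traces_sampler)  # placeholder DSN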
has_traces_sampler = callable(client.options.get("traces_sampler")) @@ -190,16 +193,19 @@ def get_description(self) -> str: def create_sampling_context(name, attributes, parent_span_context, trace_id): - # type: (str, Attributes, SpanContext, str) -> dict[str, Any] + # type: (str, Attributes, Optional[SpanContext], int) -> dict[str, Any] sampling_context = { "transaction_context": { "name": name, - "op": attributes.get(SentrySpanAttribute.OP), - "source": attributes.get(SentrySpanAttribute.SOURCE), + "op": attributes.get(SentrySpanAttribute.OP) if attributes else None, + "source": ( + attributes.get(SentrySpanAttribute.SOURCE) if attributes else None + ), }, "parent_sampled": get_parent_sampled(parent_span_context, trace_id), - } + } # type: dict[str, Any] - sampling_context.update(attributes) + if attributes is not None: + sampling_context.update(attributes) return sampling_context diff --git a/sentry_sdk/integrations/opentelemetry/utils.py b/sentry_sdk/integrations/opentelemetry/utils.py index 1db3f65da1..d890849c31 100644 --- a/sentry_sdk/integrations/opentelemetry/utils.py +++ b/sentry_sdk/integrations/opentelemetry/utils.py @@ -18,8 +18,12 @@ import sentry_sdk from sentry_sdk.utils import Dsn from sentry_sdk.consts import SPANSTATUS, OP, SPANDATA -from sentry_sdk.tracing import get_span_status_from_http_code, DEFAULT_SPAN_ORIGIN -from sentry_sdk.tracing_utils import Baggage, LOW_QUALITY_TRANSACTION_SOURCES +from sentry_sdk.tracing import ( + get_span_status_from_http_code, + DEFAULT_SPAN_ORIGIN, + LOW_QUALITY_TRANSACTION_SOURCES, +) +from sentry_sdk.tracing_utils import Baggage from sentry_sdk.integrations.opentelemetry.consts import SentrySpanAttribute from sentry_sdk._types import TYPE_CHECKING diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index e67301e1a7..7b534d7efd 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -266,7 +266,7 @@ def finish( scope=None, # type: Optional[sentry_sdk.Scope] end_timestamp=None, # type: Optional[Union[float, datetime]] ): - # type: (...) -> Optional[str] + # type: (...) 
-> None pass def set_measurement(self, name, value, unit=""): @@ -375,7 +375,9 @@ def __init__( self.set_status(status) def __eq__(self, other): - # type: (Span) -> bool + # type: (object) -> bool + if not isinstance(other, Span): + return False return self._otel_span == other._otel_span def __repr__(self): @@ -526,7 +528,6 @@ def sample_rate(self): sample_rate = self._otel_span.get_span_context().trace_state.get( TRACESTATE_SAMPLE_RATE_KEY ) - sample_rate = cast("Optional[str]", sample_rate) return float(sample_rate) if sample_rate is not None else None @property @@ -668,18 +669,24 @@ def set_data(self, key, value): def get_attribute(self, name): # type: (str) -> Optional[Any] - if not isinstance(self._otel_span, ReadableSpan): + if ( + not isinstance(self._otel_span, ReadableSpan) + or not self._otel_span.attributes + ): return None return self._otel_span.attributes.get(name) def set_attribute(self, key, value): # type: (str, Any) -> None + # otel doesn't support None as values, preferring to not set the key + # at all instead if value is None: - # otel doesn't support None as values, preferring to not set the key - # at all instead + return + serialized_value = _serialize_span_attribute(value) + if serialized_value is None: return - self._otel_span.set_attribute(key, _serialize_span_attribute(value)) + self._otel_span.set_attribute(key, serialized_value) @property def status(self): @@ -690,7 +697,7 @@ def status(self): Sentry `SPANSTATUS` it can not be guaranteed that the status set in `set_status()` will be the same as the one returned here. """ - if not hasattr(self._otel_span, "status"): + if not isinstance(self._otel_span, ReadableSpan): return None if self._otel_span.status.status_code == StatusCode.UNSET: @@ -740,10 +747,10 @@ def set_http_status(self, http_status): def is_success(self): # type: () -> bool - return self._otel_span.status.code == StatusCode.OK + return self.status == SPANSTATUS.OK def finish(self, end_timestamp=None): - # type: (Optional[Union[float, datetime]]) -> Optional[str] + # type: (Optional[Union[float, datetime]]) -> None if end_timestamp is not None: from sentry_sdk.integrations.opentelemetry.utils import ( convert_to_otel_timestamp, diff --git a/sentry_sdk/tracing_utils.py b/sentry_sdk/tracing_utils.py index 6ebe7e0322..952ffccc4c 100644 --- a/sentry_sdk/tracing_utils.py +++ b/sentry_sdk/tracing_utils.py @@ -525,52 +525,6 @@ def from_options(cls, scope): return Baggage(sentry_items, third_party_items, mutable) - @classmethod - def populate_from_transaction(cls, transaction): - # type: (sentry_sdk.tracing.Transaction) -> Baggage - """ - Populate fresh baggage entry with sentry_items and make it immutable - if this is the head SDK which originates traces. 
- """ - client = sentry_sdk.get_client() - sentry_items = {} # type: Dict[str, str] - - if not client.is_active(): - return Baggage(sentry_items) - - options = client.options or {} - - sentry_items["trace_id"] = transaction.trace_id - - if options.get("environment"): - sentry_items["environment"] = options["environment"] - - if options.get("release"): - sentry_items["release"] = options["release"] - - if options.get("dsn"): - sentry_items["public_key"] = Dsn(options["dsn"]).public_key - - if ( - transaction.name - and transaction.source not in LOW_QUALITY_TRANSACTION_SOURCES - ): - sentry_items["transaction"] = transaction.name - - if transaction.sample_rate is not None: - sentry_items["sample_rate"] = str(transaction.sample_rate) - - if transaction.sampled is not None: - sentry_items["sampled"] = "true" if transaction.sampled else "false" - - # there's an existing baggage but it was mutable, - # which is why we are creating this new baggage. - # However, if by chance the user put some sentry items in there, give them precedence. - if transaction._baggage and transaction._baggage.sentry_items: - sentry_items.update(transaction._baggage.sentry_items) - - return Baggage(sentry_items, mutable=False) - def freeze(self): # type: () -> None self.mutable = False @@ -722,6 +676,5 @@ def get_current_span(scope=None): # Circular imports from sentry_sdk.tracing import ( BAGGAGE_HEADER_NAME, - LOW_QUALITY_TRANSACTION_SOURCES, SENTRY_TRACE_HEADER_NAME, ) diff --git a/sentry_sdk/transport.py b/sentry_sdk/transport.py index 278a0a43d0..99251cf439 100644 --- a/sentry_sdk/transport.py +++ b/sentry_sdk/transport.py @@ -23,7 +23,7 @@ from sentry_sdk.worker import BackgroundWorker from sentry_sdk.envelope import Envelope, Item, PayloadRef -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, cast if TYPE_CHECKING: from typing import Any @@ -179,6 +179,7 @@ def _parse_rate_limits(header, now=None): retry_after = now + timedelta(seconds=int(retry_after_val)) for category in categories and categories.split(";") or (None,): + category = cast("Optional[EventDataCategory]", category) yield category, retry_after except (LookupError, ValueError): continue From 9df9cbf124521f99593a2dd5d2896c33d250b312 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Wed, 22 Jan 2025 16:49:57 +0100 Subject: [PATCH 175/244] Cleanup scopes types and no-op old scope tracing methods (#3985) closes #3929 and #3969 --- sentry_sdk/api.py | 5 +- sentry_sdk/client.py | 2 +- .../integrations/opentelemetry/scope.py | 34 +++-- .../integrations/opentelemetry/utils.py | 35 +++-- sentry_sdk/scope.py | 134 +++--------------- sentry_sdk/tracing.py | 5 +- 6 files changed, 73 insertions(+), 142 deletions(-) diff --git a/sentry_sdk/api.py b/sentry_sdk/api.py index 86577cc500..50c12dd636 100644 --- a/sentry_sdk/api.py +++ b/sentry_sdk/api.py @@ -7,6 +7,7 @@ from sentry_sdk.crons import monitor # TODO-neel-potel make 2 scope strategies/impls and switch +from sentry_sdk.scope import Scope as BaseScope from sentry_sdk.integrations.opentelemetry.scope import ( PotelScope as Scope, new_scope, @@ -123,7 +124,7 @@ def is_initialized(): @scopemethod def get_global_scope(): - # type: () -> Scope + # type: () -> BaseScope return Scope.get_global_scope() @@ -239,7 +240,7 @@ def flush( def start_span(**kwargs): - # type: (type.Any) -> Span + # type: (Any) -> Span """ Start and return a span. 
diff --git a/sentry_sdk/client.py b/sentry_sdk/client.py index 85623d8056..fefdcaecd8 100644 --- a/sentry_sdk/client.py +++ b/sentry_sdk/client.py @@ -483,7 +483,7 @@ def _prepare_event( for key in "release", "environment", "server_name", "dist": if event.get(key) is None and self.options[key] is not None: - event[key] = str(self.options[key]).strip() # type: ignore[literal-required] + event[key] = str(self.options[key]).strip() if event.get("sdk") is None: sdk_info = dict(SDK_INFO) sdk_info["integrations"] = sorted(self.integrations.keys()) diff --git a/sentry_sdk/integrations/opentelemetry/scope.py b/sentry_sdk/integrations/opentelemetry/scope.py index d16215ab20..c60e5eb716 100644 --- a/sentry_sdk/integrations/opentelemetry/scope.py +++ b/sentry_sdk/integrations/opentelemetry/scope.py @@ -29,6 +29,7 @@ from sentry_sdk.integrations.opentelemetry.utils import trace_state_from_baggage from sentry_sdk.scope import Scope, ScopeType from sentry_sdk.tracing import Span +from sentry_sdk.utils import logger from sentry_sdk._types import TYPE_CHECKING if TYPE_CHECKING: @@ -41,15 +42,17 @@ class PotelScope(Scope): @classmethod def _get_scopes(cls): - # type: () -> Optional[Tuple[Scope, Scope]] + # type: () -> Optional[Tuple[PotelScope, PotelScope]] """ Returns the current scopes tuple on the otel context. Internal use only. """ - return cast("Optional[Tuple[Scope, Scope]]", get_value(SENTRY_SCOPES_KEY)) + return cast( + "Optional[Tuple[PotelScope, PotelScope]]", get_value(SENTRY_SCOPES_KEY) + ) @classmethod def get_current_scope(cls): - # type: () -> Scope + # type: () -> PotelScope """ Returns the current scope. """ @@ -57,7 +60,7 @@ def get_current_scope(cls): @classmethod def _get_current_scope(cls): - # type: () -> Optional[Scope] + # type: () -> Optional[PotelScope] """ Returns the current scope without creating a new one. Internal use only. """ @@ -66,7 +69,7 @@ def _get_current_scope(cls): @classmethod def get_isolation_scope(cls): - # type: () -> Scope + # type: () -> PotelScope """ Returns the isolation scope. """ @@ -74,7 +77,7 @@ def get_isolation_scope(cls): @classmethod def _get_isolation_scope(cls): - # type: () -> Optional[Scope] + # type: () -> Optional[PotelScope] """ Returns the isolation scope without creating a new one. Internal use only. """ @@ -84,6 +87,11 @@ def _get_isolation_scope(cls): @contextmanager def continue_trace(self, environ_or_headers): # type: (Dict[str, Any]) -> Generator[None, None, None] + """ + Sets the propagation context from environment or headers to continue an incoming trace. + Any span started within this context manager will use the same trace_id, parent_span_id + and inherit the sampling decision from the incoming trace. + """ self.generate_propagation_context(environ_or_headers) span_context = self._incoming_otel_span_context() @@ -118,8 +126,8 @@ def _incoming_otel_span_context(self): trace_state = trace_state.add(TRACESTATE_SAMPLED_KEY, "deferred") span_context = SpanContext( - trace_id=int(self._propagation_context.trace_id, 16), # type: ignore - span_id=int(self._propagation_context.parent_span_id, 16), # type: ignore + trace_id=int(self._propagation_context.trace_id, 16), + span_id=int(self._propagation_context.parent_span_id, 16), is_remote=True, trace_flags=trace_flags, trace_state=trace_state, @@ -134,18 +142,22 @@ def start_transaction(self, **kwargs): This function is deprecated and will be removed in a future release. Use :py:meth:`sentry_sdk.start_span` instead. 
""" + logger.warning( + "The `start_transaction` method is deprecated, please use `sentry_sdk.start_span instead.`" + ) return self.start_span(**kwargs) def start_span(self, **kwargs): # type: (Any) -> Span - return Span(**kwargs, scope=self) + return Span(**kwargs) -_INITIAL_CURRENT_SCOPE = None -_INITIAL_ISOLATION_SCOPE = None +_INITIAL_CURRENT_SCOPE = PotelScope(ty=ScopeType.CURRENT) +_INITIAL_ISOLATION_SCOPE = PotelScope(ty=ScopeType.ISOLATION) def setup_initial_scopes(): + # type: () -> None global _INITIAL_CURRENT_SCOPE, _INITIAL_ISOLATION_SCOPE _INITIAL_CURRENT_SCOPE = PotelScope(ty=ScopeType.CURRENT) _INITIAL_ISOLATION_SCOPE = PotelScope(ty=ScopeType.ISOLATION) diff --git a/sentry_sdk/integrations/opentelemetry/utils.py b/sentry_sdk/integrations/opentelemetry/utils.py index d890849c31..34fea2f46d 100644 --- a/sentry_sdk/integrations/opentelemetry/utils.py +++ b/sentry_sdk/integrations/opentelemetry/utils.py @@ -95,7 +95,7 @@ def convert_from_otel_timestamp(time): def convert_to_otel_timestamp(time): - # type: (Union[datetime.datetime, float]) -> int + # type: (Union[datetime, float]) -> int """Convert a datetime to an OTel timestamp (with nanosecond precision).""" if isinstance(time, datetime): return int(time.timestamp() * 1e9) @@ -121,9 +121,12 @@ def extract_span_data(span): if span.attributes is None: return (op, description, status, http_status, origin) - op = span.attributes.get(SentrySpanAttribute.OP) or op - description = span.attributes.get(SentrySpanAttribute.DESCRIPTION) or description - origin = span.attributes.get(SentrySpanAttribute.ORIGIN) + attribute_op = cast("Optional[str]", span.attributes.get(SentrySpanAttribute.OP)) + op = attribute_op or op + description = cast( + "str", span.attributes.get(SentrySpanAttribute.DESCRIPTION) or description + ) + origin = cast("Optional[str]", span.attributes.get(SentrySpanAttribute.ORIGIN)) http_method = span.attributes.get(SpanAttributes.HTTP_METHOD) http_method = cast("Optional[str]", http_method) @@ -137,7 +140,7 @@ def extract_span_data(span): rpc_service = span.attributes.get(SpanAttributes.RPC_SERVICE) if rpc_service: return ( - span.attributes.get(SentrySpanAttribute.OP) or "rpc", + attribute_op or "rpc", description, status, http_status, @@ -147,7 +150,7 @@ def extract_span_data(span): messaging_system = span.attributes.get(SpanAttributes.MESSAGING_SYSTEM) if messaging_system: return ( - span.attributes.get(SentrySpanAttribute.OP) or "message", + attribute_op or "message", description, status, http_status, @@ -165,7 +168,7 @@ def span_data_for_http_method(span): # type: (ReadableSpan) -> OtelExtractedSpanData span_attributes = span.attributes or {} - op = span_attributes.get(SentrySpanAttribute.OP) + op = cast("Optional[str]", span_attributes.get(SentrySpanAttribute.OP)) if op is None: op = "http" @@ -183,6 +186,7 @@ def span_data_for_http_method(span): description = span_attributes.get( SentrySpanAttribute.DESCRIPTION ) or span_attributes.get(SentrySpanAttribute.NAME) + description = cast("Optional[str]", description) if description is None: description = f"{http_method}" @@ -205,7 +209,7 @@ def span_data_for_http_method(span): status, http_status = extract_span_status(span) - origin = span_attributes.get(SentrySpanAttribute.ORIGIN) + origin = cast("Optional[str]", span_attributes.get(SentrySpanAttribute.ORIGIN)) return (op, description, status, http_status, origin) @@ -214,13 +218,13 @@ def span_data_for_db_query(span): # type: (ReadableSpan) -> OtelExtractedSpanData span_attributes = span.attributes or {} - op = 
span_attributes.get(SentrySpanAttribute.OP, OP.DB) + op = cast("str", span_attributes.get(SentrySpanAttribute.OP, OP.DB)) statement = span_attributes.get(SpanAttributes.DB_STATEMENT, None) statement = cast("Optional[str]", statement) description = statement or span.name - origin = span_attributes.get(SentrySpanAttribute.ORIGIN) + origin = cast("Optional[str]", span_attributes.get(SentrySpanAttribute.ORIGIN)) return (op, description, None, None, origin) @@ -293,19 +297,20 @@ def extract_span_attributes(span, namespace): """ Extract Sentry-specific span attributes and make them look the way Sentry expects. """ - extracted_attrs = {} + extracted_attrs = {} # type: dict[str, Any] for attr, value in (span.attributes or {}).items(): if attr.startswith(namespace): key = attr[len(namespace) + 1 :] if namespace == SentrySpanAttribute.MEASUREMENT: - value = { + value = cast("tuple[str, str]", value) + extracted_attrs[key] = { "value": float(value[0]), "unit": value[1], } - - extracted_attrs[key] = value + else: + extracted_attrs[key] = value return extracted_attrs @@ -457,7 +462,7 @@ def set_sentry_meta(span, key, value): # type: (Union[AbstractSpan, ReadableSpan], str, Any) -> None sentry_meta = getattr(span, "_sentry_meta", {}) sentry_meta[key] = value - span._sentry_meta = sentry_meta + span._sentry_meta = sentry_meta # type: ignore[union-attr] def get_profile_context(span): diff --git a/sentry_sdk/scope.py b/sentry_sdk/scope.py index af69aca4ee..45f3dfe0a0 100644 --- a/sentry_sdk/scope.py +++ b/sentry_sdk/scope.py @@ -12,13 +12,11 @@ from sentry_sdk.attachments import Attachment from sentry_sdk.consts import DEFAULT_MAX_BREADCRUMBS, FALSE_VALUES from sentry_sdk.feature_flags import FlagBuffer, DEFAULT_FLAG_CAPACITY -from sentry_sdk.profiler.continuous_profiler import try_autostart_continuous_profiler from sentry_sdk.profiler.transaction_profiler import Profile from sentry_sdk.session import Session from sentry_sdk.tracing_utils import ( Baggage, has_tracing_enabled, - normalize_incoming_data, PropagationContext, ) from sentry_sdk.tracing import ( @@ -55,6 +53,7 @@ from typing import Tuple from typing import TypeVar from typing import Union + from typing import Self from typing_extensions import Unpack @@ -67,7 +66,6 @@ ExcInfo, Hint, LogLevelStr, - SamplingContext, Type, ) @@ -194,12 +192,12 @@ def __init__(self, ty=None, client=None): self.generate_propagation_context(incoming_data=incoming_trace_information) def __copy__(self): - # type: () -> Scope + # type: () -> Self """ Returns a copy of this scope. This also creates a copy of all referenced data structures. """ - rv = object.__new__(self.__class__) # type: Scope + rv = object.__new__(self.__class__) # type: Self rv._type = self._type rv.client = self.client @@ -333,7 +331,7 @@ def last_event_id(cls): return cls.get_isolation_scope()._last_event_id def _merge_scopes(self, additional_scope=None, additional_scope_kwargs=None): - # type: (Optional[Scope], Optional[Dict[str, Any]]) -> Scope + # type: (Optional[Scope], Optional[Dict[str, Any]]) -> Self """ Merges global, isolation and current scope into a new scope and adds the given additional scope or additional scope kwargs to it. @@ -419,7 +417,7 @@ def set_client(self, client=None): self.client = client if client is not None else NonRecordingClient() def fork(self): - # type: () -> Scope + # type: () -> Self """ .. 
versionadded:: 2.0.0 @@ -949,71 +947,20 @@ def add_breadcrumb(self, crumb=None, hint=None, **kwargs): while len(self._breadcrumbs) > max_breadcrumbs: self._breadcrumbs.popleft() - def start_transaction(self, transaction=None, **kwargs): - # type: (Optional[Span], Optional[SamplingContext], Unpack[TransactionKwargs]) -> Union[Span, NoOpSpan] + def start_transaction(self, **kwargs): + # type: (Unpack[TransactionKwargs]) -> Union[NoOpSpan, Span] """ - Start and return a transaction. - - Start an existing transaction if given, otherwise create and start a new - transaction with kwargs. - - This is the entry point to manual tracing instrumentation. - - A tree structure can be built by adding child spans to the transaction, - and child spans to other spans. To start a new child span within the - transaction or any span, call the respective `.start_child()` method. - - Every child span must be finished before the transaction is finished, - otherwise the unfinished spans are discarded. - - When used as context managers, spans and transactions are automatically - finished at the end of the `with` block. If not using context managers, - call the `.finish()` method. - - When the transaction is finished, it will be sent to Sentry with all its - finished child spans. - - :param transaction: The transaction to start. If omitted, we create and - start a new transaction. - :param kwargs: Optional keyword arguments to be passed to the Transaction - constructor. See :py:class:`sentry_sdk.tracing.Transaction` for - available arguments. + .. deprecated:: 3.0.0 + This function is deprecated and will be removed in a future release. + Use :py:meth:`sentry_sdk.start_span` instead. """ - # TODO-neel-potel fix signature and no op - kwargs.setdefault("scope", self) - - client = self.get_client() - - try_autostart_continuous_profiler() - - # if we haven't been given a transaction, make one - transaction = Span(**kwargs) - - # use traces_sample_rate, traces_sampler, and/or inheritance to make a - # sampling decision - sampling_context = { - "transaction_context": transaction.to_json(), - "parent_sampled": transaction.parent_sampled, - } - transaction._set_initial_sampling_decision(sampling_context=sampling_context) - - if transaction.sampled: - profile = Profile( - transaction.sampled, transaction._start_timestamp_monotonic_ns - ) - profile._set_initial_sampling_decision(sampling_context=sampling_context) - - transaction._profile = profile - - # we don't bother to keep spans if we already know we're not going to - # send the transaction - max_spans = (client.options["_experiments"].get("max_spans")) or 1000 - transaction.init_span_recorder(maxlen=max_spans) - - return transaction + logger.warning( + "The `start_transaction` method is deprecated, please use `sentry_sdk.start_span instead.`" + ) + return NoOpSpan(**kwargs) def start_span(self, **kwargs): - # type: (Optional[Span], Any) -> Span + # type: (Any) -> Union[NoOpSpan, Span] """ Start a span whose parent is the currently active span, if any. @@ -1023,53 +970,16 @@ def start_span(self, **kwargs): For supported `**kwargs` see :py:class:`sentry_sdk.tracing.Span`. """ - # TODO-neel-potel fix signature and no op - if kwargs.get("description") is not None: - warnings.warn( - "The `description` parameter is deprecated. 
Please use `name` instead.", - DeprecationWarning, - stacklevel=2, - ) - - with new_scope(): - kwargs.setdefault("scope", self) - - # get current span or transaction - span = self.span or self.get_isolation_scope().span + return NoOpSpan(**kwargs) - if span is None: - # New spans get the `trace_id` from the scope - if "trace_id" not in kwargs: - propagation_context = self.get_active_propagation_context() - if propagation_context is not None: - kwargs["trace_id"] = propagation_context.trace_id - - span = Span(**kwargs) - else: - # Children take `trace_id`` from the parent span. - span = span.start_child(**kwargs) - - return span - - def continue_trace( - self, environ_or_headers, op=None, name=None, source=None, origin=None - ): - # TODO-neel-potel fix signature and no op - # type: (Dict[str, Any], Optional[str], Optional[str], Optional[str], Optional[str]) -> Span + @contextmanager + def continue_trace(self, environ_or_headers): + # type: (Dict[str, Any]) -> Generator[None, None, None] """ - Sets the propagation context from environment or headers and returns a transaction. + Sets the propagation context from environment or headers to continue an incoming trace. """ self.generate_propagation_context(environ_or_headers) - - transaction = Span.continue_from_headers( - normalize_incoming_data(environ_or_headers), - op=op, - origin=origin, - name=name, - source=source, - ) - - return transaction + yield def capture_event(self, event, hint=None, scope=None, **scope_kwargs): # type: (Event, Optional[Hint], Optional[Scope], Any) -> Optional[str] @@ -1382,7 +1292,7 @@ def run_event_processors(self, event, hint): ) for event_processor in event_processors: - new_event = event + new_event = event # type: Optional[Event] with capture_internal_exceptions(): new_event = event_processor(event, hint) if new_event is None: diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 7b534d7efd..9d0ee6404d 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -200,6 +200,10 @@ def get_span_status_from_http_code(http_status_code): class NoOpSpan: + def __init__(self, **kwargs): + # type: (Any) -> None + pass + def __repr__(self): # type: () -> str return "<%s>" % self.__class__.__name__ @@ -263,7 +267,6 @@ def get_profile_context(self): def finish( self, - scope=None, # type: Optional[sentry_sdk.Scope] end_timestamp=None, # type: Optional[Union[float, datetime]] ): # type: (...) 
-> None From 289496349140822e2102aeb9c30d360869b043a4 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Wed, 22 Jan 2025 16:50:31 +0100 Subject: [PATCH 176/244] Fix circular imports for sphinx (#3986) --- docs/conf.py | 2 ++ .../integrations/opentelemetry/contextvars_context.py | 8 ++++---- sentry_sdk/integrations/opentelemetry/propagator.py | 4 ++-- 3 files changed, 8 insertions(+), 6 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 1d58274beb..64d49a1188 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -13,6 +13,8 @@ import sphinx.ext.autodoc # noqa: F401 import sphinx.ext.intersphinx # noqa: F401 import urllib3.exceptions # noqa: F401 +import importlib_metadata # noqa: F401 +import opentelemetry.sdk.metrics._internal # noqa: F401 typing.TYPE_CHECKING = True diff --git a/sentry_sdk/integrations/opentelemetry/contextvars_context.py b/sentry_sdk/integrations/opentelemetry/contextvars_context.py index 8025f26ba8..df818bc399 100644 --- a/sentry_sdk/integrations/opentelemetry/contextvars_context.py +++ b/sentry_sdk/integrations/opentelemetry/contextvars_context.py @@ -14,7 +14,7 @@ if TYPE_CHECKING: from typing import Optional - from sentry_sdk.integrations.opentelemetry.scope import PotelScope + import sentry_sdk.integrations.opentelemetry.scope as scope class SentryContextVarsRuntimeContext(ContextVarsRuntimeContext): @@ -29,16 +29,16 @@ def attach(self, context): should_use_isolation_scope = context.pop(SENTRY_USE_ISOLATION_SCOPE_KEY, None) should_use_isolation_scope = cast( - "Optional[PotelScope]", should_use_isolation_scope + "Optional[scope.PotelScope]", should_use_isolation_scope ) should_use_current_scope = context.pop(SENTRY_USE_CURRENT_SCOPE_KEY, None) should_use_current_scope = cast( - "Optional[PotelScope]", should_use_current_scope + "Optional[scope.PotelScope]", should_use_current_scope ) if scopes: - scopes = cast("tuple[PotelScope, PotelScope]", scopes) + scopes = cast("tuple[scope.PotelScope, scope.PotelScope]", scopes) (current_scope, isolation_scope) = scopes else: current_scope = sentry_sdk.get_current_scope() diff --git a/sentry_sdk/integrations/opentelemetry/propagator.py b/sentry_sdk/integrations/opentelemetry/propagator.py index 37d6362f82..fcc2009849 100644 --- a/sentry_sdk/integrations/opentelemetry/propagator.py +++ b/sentry_sdk/integrations/opentelemetry/propagator.py @@ -36,7 +36,7 @@ if TYPE_CHECKING: from typing import Optional, Set - from sentry_sdk.integrations.opentelemetry.scope import PotelScope + import sentry_sdk.integrations.opentelemetry.scope as scope class SentryPropagator(TextMapPropagator): @@ -94,7 +94,7 @@ def inject(self, carrier, context=None, setter=default_setter): scopes = get_value(SENTRY_SCOPES_KEY, context) if scopes: - scopes = cast("tuple[PotelScope, PotelScope]", scopes) + scopes = cast("tuple[scope.PotelScope, scope.PotelScope]", scopes) (current_scope, _) = scopes # TODO-neel-potel check trace_propagation_targets From 9536cd5541b6ad365fe6d7e2dbc08f5f43fc2ba9 Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Thu, 13 Feb 2025 11:40:20 +0100 Subject: [PATCH 177/244] typo --- sentry_sdk/integrations/opentelemetry/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sentry_sdk/integrations/opentelemetry/utils.py b/sentry_sdk/integrations/opentelemetry/utils.py index 34fea2f46d..6e711c34a8 100644 --- a/sentry_sdk/integrations/opentelemetry/utils.py +++ b/sentry_sdk/integrations/opentelemetry/utils.py @@ -388,7 +388,7 @@ def dsc_from_trace_state(trace_state): def has_incoming_trace(trace_state): # type: 
(TraceState) -> bool """ - The existence a sentry-trace_id in the baggage implies we continued an upstream trace. + The existence of a sentry-trace_id in the baggage implies we continued an upstream trace. """ return (Baggage.SENTRY_PREFIX + "trace_id") in trace_state From 71e22b4d5b70e18c4b8c969fff6ca5e059f95ce1 Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Mon, 17 Feb 2025 15:15:25 +0100 Subject: [PATCH 178/244] Sync toxgen with master (#4061) --- .github/workflows/test-integrations-ai.yml | 8 +- .github/workflows/test-integrations-dbs.yml | 6 +- .github/workflows/test-integrations-flags.yml | 62 +- .../workflows/test-integrations-gevent.yml | 2 +- .../workflows/test-integrations-graphql.yml | 62 +- .github/workflows/test-integrations-misc.yml | 68 +- .../workflows/test-integrations-network.yml | 6 +- .github/workflows/test-integrations-tasks.yml | 6 +- .github/workflows/test-integrations-web-1.yml | 6 +- .github/workflows/test-integrations-web-2.yml | 8 +- scripts/populate_tox/tox.jinja | 57 +- tox.ini | 594 +++++++++--------- 12 files changed, 344 insertions(+), 541 deletions(-) diff --git a/.github/workflows/test-integrations-ai.yml b/.github/workflows/test-integrations-ai.yml index 85c5475aff..a0f3e26a04 100644 --- a/.github/workflows/test-integrations-ai.yml +++ b/.github/workflows/test-integrations-ai.yml @@ -29,7 +29,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.7","3.9","3.11","3.12","3.13"] + python-version: ["3.7","3.9","3.11","3.12"] os: [ubuntu-22.04] steps: - uses: actions/checkout@v4.2.2 @@ -70,7 +70,7 @@ jobs: coverage xml - name: Upload coverage to Codecov if: ${{ !cancelled() }} - uses: codecov/codecov-action@v5.1.2 + uses: codecov/codecov-action@v5.3.1 with: token: ${{ secrets.CODECOV_TOKEN }} files: coverage.xml @@ -91,7 +91,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.8","3.9","3.11","3.12","3.13"] + python-version: ["3.8","3.9","3.11","3.12"] os: [ubuntu-22.04] steps: - uses: actions/checkout@v4.2.2 @@ -132,7 +132,7 @@ jobs: coverage xml - name: Upload coverage to Codecov if: ${{ !cancelled() }} - uses: codecov/codecov-action@v5.1.2 + uses: codecov/codecov-action@v5.3.1 with: token: ${{ secrets.CODECOV_TOKEN }} files: coverage.xml diff --git a/.github/workflows/test-integrations-dbs.yml b/.github/workflows/test-integrations-dbs.yml index 12359c93b4..92778617f4 100644 --- a/.github/workflows/test-integrations-dbs.yml +++ b/.github/workflows/test-integrations-dbs.yml @@ -93,7 +93,7 @@ jobs: coverage xml - name: Upload coverage to Codecov if: ${{ !cancelled() }} - uses: codecov/codecov-action@v5.1.2 + uses: codecov/codecov-action@v5.3.1 with: token: ${{ secrets.CODECOV_TOKEN }} files: coverage.xml @@ -114,7 +114,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.7","3.8","3.9","3.10","3.11","3.12"] + python-version: ["3.7","3.8","3.9","3.10","3.11","3.12","3.13"] os: [ubuntu-22.04] services: postgres: @@ -178,7 +178,7 @@ jobs: coverage xml - name: Upload coverage to Codecov if: ${{ !cancelled() }} - uses: codecov/codecov-action@v5.1.2 + uses: codecov/codecov-action@v5.3.1 with: token: ${{ secrets.CODECOV_TOKEN }} files: coverage.xml diff --git a/.github/workflows/test-integrations-flags.yml b/.github/workflows/test-integrations-flags.yml index 8694bc40a9..6e4033389e 100644 --- a/.github/workflows/test-integrations-flags.yml +++ b/.github/workflows/test-integrations-flags.yml @@ -22,60 +22,6 @@ env: CACHED_BUILD_PATHS: | ${{ github.workspace }}/dist-serverless jobs: - test-flags-latest: - name: Flags 
(latest) - timeout-minutes: 30 - runs-on: ${{ matrix.os }} - strategy: - fail-fast: false - matrix: - python-version: ["3.8","3.12","3.13"] - os: [ubuntu-22.04] - steps: - - uses: actions/checkout@v4.2.2 - - uses: actions/setup-python@v5 - with: - python-version: ${{ matrix.python-version }} - allow-prereleases: true - - name: Setup Test Env - run: | - pip install "coverage[toml]" tox - - name: Erase coverage - run: | - coverage erase - - name: Test launchdarkly latest - run: | - set -x # print commands that are executed - ./scripts/runtox.sh "py${{ matrix.python-version }}-launchdarkly-latest" - - name: Test openfeature latest - run: | - set -x # print commands that are executed - ./scripts/runtox.sh "py${{ matrix.python-version }}-openfeature-latest" - - name: Test unleash latest - run: | - set -x # print commands that are executed - ./scripts/runtox.sh "py${{ matrix.python-version }}-unleash-latest" - - name: Generate coverage XML - if: ${{ !cancelled() }} - run: | - coverage combine .coverage-sentry-* - coverage xml - - name: Upload coverage to Codecov - if: ${{ !cancelled() }} - uses: codecov/codecov-action@v5.1.2 - with: - token: ${{ secrets.CODECOV_TOKEN }} - files: coverage.xml - # make sure no plugins alter our coverage reports - plugin: noop - verbose: true - - name: Upload test results to Codecov - if: ${{ !cancelled() }} - uses: codecov/test-results-action@v1 - with: - token: ${{ secrets.CODECOV_TOKEN }} - files: .junitxml - verbose: true test-flags-pinned: name: Flags (pinned) timeout-minutes: 30 @@ -83,7 +29,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.8","3.12","3.13"] + python-version: ["3.7","3.8","3.9","3.12","3.13"] os: [ubuntu-22.04] steps: - uses: actions/checkout@v4.2.2 @@ -105,6 +51,10 @@ jobs: run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-openfeature" + - name: Test statsig pinned + run: | + set -x # print commands that are executed + ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-statsig" - name: Test unleash pinned run: | set -x # print commands that are executed @@ -116,7 +66,7 @@ jobs: coverage xml - name: Upload coverage to Codecov if: ${{ !cancelled() }} - uses: codecov/codecov-action@v5.1.2 + uses: codecov/codecov-action@v5.3.1 with: token: ${{ secrets.CODECOV_TOKEN }} files: coverage.xml diff --git a/.github/workflows/test-integrations-gevent.yml b/.github/workflows/test-integrations-gevent.yml index 2517c18cce..bb371d1717 100644 --- a/.github/workflows/test-integrations-gevent.yml +++ b/.github/workflows/test-integrations-gevent.yml @@ -54,7 +54,7 @@ jobs: coverage xml - name: Upload coverage to Codecov if: ${{ !cancelled() }} - uses: codecov/codecov-action@v5.1.2 + uses: codecov/codecov-action@v5.3.1 with: token: ${{ secrets.CODECOV_TOKEN }} files: coverage.xml diff --git a/.github/workflows/test-integrations-graphql.yml b/.github/workflows/test-integrations-graphql.yml index 7b6571619a..2fdd46422b 100644 --- a/.github/workflows/test-integrations-graphql.yml +++ b/.github/workflows/test-integrations-graphql.yml @@ -22,64 +22,6 @@ env: CACHED_BUILD_PATHS: | ${{ github.workspace }}/dist-serverless jobs: - test-graphql-latest: - name: GraphQL (latest) - timeout-minutes: 30 - runs-on: ${{ matrix.os }} - strategy: - fail-fast: false - matrix: - python-version: ["3.7","3.8","3.12","3.13"] - os: [ubuntu-22.04] - steps: - - uses: actions/checkout@v4.2.2 - - uses: actions/setup-python@v5 - with: - python-version: ${{ matrix.python-version }} - 
allow-prereleases: true - - name: Setup Test Env - run: | - pip install "coverage[toml]" tox - - name: Erase coverage - run: | - coverage erase - - name: Test ariadne latest - run: | - set -x # print commands that are executed - ./scripts/runtox.sh "py${{ matrix.python-version }}-ariadne-latest" - - name: Test gql latest - run: | - set -x # print commands that are executed - ./scripts/runtox.sh "py${{ matrix.python-version }}-gql-latest" - - name: Test graphene latest - run: | - set -x # print commands that are executed - ./scripts/runtox.sh "py${{ matrix.python-version }}-graphene-latest" - - name: Test strawberry latest - run: | - set -x # print commands that are executed - ./scripts/runtox.sh "py${{ matrix.python-version }}-strawberry-latest" - - name: Generate coverage XML - if: ${{ !cancelled() }} - run: | - coverage combine .coverage-sentry-* - coverage xml - - name: Upload coverage to Codecov - if: ${{ !cancelled() }} - uses: codecov/codecov-action@v5.1.2 - with: - token: ${{ secrets.CODECOV_TOKEN }} - files: coverage.xml - # make sure no plugins alter our coverage reports - plugin: noop - verbose: true - - name: Upload test results to Codecov - if: ${{ !cancelled() }} - uses: codecov/test-results-action@v1 - with: - token: ${{ secrets.CODECOV_TOKEN }} - files: .junitxml - verbose: true test-graphql-pinned: name: GraphQL (pinned) timeout-minutes: 30 @@ -87,7 +29,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.7","3.8","3.11","3.12"] + python-version: ["3.7","3.8","3.9","3.10","3.11","3.12","3.13"] os: [ubuntu-22.04] steps: - uses: actions/checkout@v4.2.2 @@ -124,7 +66,7 @@ jobs: coverage xml - name: Upload coverage to Codecov if: ${{ !cancelled() }} - uses: codecov/codecov-action@v5.1.2 + uses: codecov/codecov-action@v5.3.1 with: token: ${{ secrets.CODECOV_TOKEN }} files: coverage.xml diff --git a/.github/workflows/test-integrations-misc.yml b/.github/workflows/test-integrations-misc.yml index 013a21646c..b70d37702d 100644 --- a/.github/workflows/test-integrations-misc.yml +++ b/.github/workflows/test-integrations-misc.yml @@ -22,72 +22,6 @@ env: CACHED_BUILD_PATHS: | ${{ github.workspace }}/dist-serverless jobs: - test-misc-latest: - name: Misc (latest) - timeout-minutes: 30 - runs-on: ${{ matrix.os }} - strategy: - fail-fast: false - matrix: - python-version: ["3.7","3.8","3.12","3.13"] - os: [ubuntu-22.04] - steps: - - uses: actions/checkout@v4.2.2 - - uses: actions/setup-python@v5 - with: - python-version: ${{ matrix.python-version }} - allow-prereleases: true - - name: Setup Test Env - run: | - pip install "coverage[toml]" tox - - name: Erase coverage - run: | - coverage erase - - name: Test loguru latest - run: | - set -x # print commands that are executed - ./scripts/runtox.sh "py${{ matrix.python-version }}-loguru-latest" - - name: Test opentelemetry latest - run: | - set -x # print commands that are executed - ./scripts/runtox.sh "py${{ matrix.python-version }}-opentelemetry-latest" - - name: Test potel latest - run: | - set -x # print commands that are executed - ./scripts/runtox.sh "py${{ matrix.python-version }}-potel-latest" - - name: Test pure_eval latest - run: | - set -x # print commands that are executed - ./scripts/runtox.sh "py${{ matrix.python-version }}-pure_eval-latest" - - name: Test trytond latest - run: | - set -x # print commands that are executed - ./scripts/runtox.sh "py${{ matrix.python-version }}-trytond-latest" - - name: Test typer latest - run: | - set -x # print commands that are executed - ./scripts/runtox.sh "py${{ 
matrix.python-version }}-typer-latest" - - name: Generate coverage XML - if: ${{ !cancelled() }} - run: | - coverage combine .coverage-sentry-* - coverage xml - - name: Upload coverage to Codecov - if: ${{ !cancelled() }} - uses: codecov/codecov-action@v5.1.2 - with: - token: ${{ secrets.CODECOV_TOKEN }} - files: coverage.xml - # make sure no plugins alter our coverage reports - plugin: noop - verbose: true - - name: Upload test results to Codecov - if: ${{ !cancelled() }} - uses: codecov/test-results-action@v1 - with: - token: ${{ secrets.CODECOV_TOKEN }} - files: .junitxml - verbose: true test-misc-pinned: name: Misc (pinned) timeout-minutes: 30 @@ -140,7 +74,7 @@ jobs: coverage xml - name: Upload coverage to Codecov if: ${{ !cancelled() }} - uses: codecov/codecov-action@v5.1.2 + uses: codecov/codecov-action@v5.3.1 with: token: ${{ secrets.CODECOV_TOKEN }} files: coverage.xml diff --git a/.github/workflows/test-integrations-network.yml b/.github/workflows/test-integrations-network.yml index a6cafb2b66..bea1a553bd 100644 --- a/.github/workflows/test-integrations-network.yml +++ b/.github/workflows/test-integrations-network.yml @@ -29,7 +29,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.8","3.9","3.11","3.12","3.13"] + python-version: ["3.9","3.12","3.13"] os: [ubuntu-22.04] steps: - uses: actions/checkout@v4.2.2 @@ -62,7 +62,7 @@ jobs: coverage xml - name: Upload coverage to Codecov if: ${{ !cancelled() }} - uses: codecov/codecov-action@v5.1.2 + uses: codecov/codecov-action@v5.3.1 with: token: ${{ secrets.CODECOV_TOKEN }} files: coverage.xml @@ -116,7 +116,7 @@ jobs: coverage xml - name: Upload coverage to Codecov if: ${{ !cancelled() }} - uses: codecov/codecov-action@v5.1.2 + uses: codecov/codecov-action@v5.3.1 with: token: ${{ secrets.CODECOV_TOKEN }} files: coverage.xml diff --git a/.github/workflows/test-integrations-tasks.yml b/.github/workflows/test-integrations-tasks.yml index 1b232c4d64..a727058bbc 100644 --- a/.github/workflows/test-integrations-tasks.yml +++ b/.github/workflows/test-integrations-tasks.yml @@ -84,7 +84,7 @@ jobs: coverage xml - name: Upload coverage to Codecov if: ${{ !cancelled() }} - uses: codecov/codecov-action@v5.1.2 + uses: codecov/codecov-action@v5.3.1 with: token: ${{ secrets.CODECOV_TOKEN }} files: coverage.xml @@ -105,7 +105,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.7","3.8","3.9","3.10","3.11","3.12"] + python-version: ["3.7","3.8","3.9","3.10","3.11","3.12","3.13"] os: [ubuntu-22.04] steps: - uses: actions/checkout@v4.2.2 @@ -160,7 +160,7 @@ jobs: coverage xml - name: Upload coverage to Codecov if: ${{ !cancelled() }} - uses: codecov/codecov-action@v5.1.2 + uses: codecov/codecov-action@v5.3.1 with: token: ${{ secrets.CODECOV_TOKEN }} files: coverage.xml diff --git a/.github/workflows/test-integrations-web-1.yml b/.github/workflows/test-integrations-web-1.yml index 1b3163c7ba..0f4399d439 100644 --- a/.github/workflows/test-integrations-web-1.yml +++ b/.github/workflows/test-integrations-web-1.yml @@ -84,7 +84,7 @@ jobs: coverage xml - name: Upload coverage to Codecov if: ${{ !cancelled() }} - uses: codecov/codecov-action@v5.1.2 + uses: codecov/codecov-action@v5.3.1 with: token: ${{ secrets.CODECOV_TOKEN }} files: coverage.xml @@ -105,7 +105,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.7","3.8","3.9","3.10","3.11","3.12"] + python-version: ["3.7","3.8","3.9","3.10","3.11","3.12","3.13"] os: [ubuntu-22.04] services: postgres: @@ -160,7 +160,7 @@ jobs: coverage xml - name: Upload 
coverage to Codecov if: ${{ !cancelled() }} - uses: codecov/codecov-action@v5.1.2 + uses: codecov/codecov-action@v5.3.1 with: token: ${{ secrets.CODECOV_TOKEN }} files: coverage.xml diff --git a/.github/workflows/test-integrations-web-2.yml b/.github/workflows/test-integrations-web-2.yml index fdd6c5ede3..9706cbca3e 100644 --- a/.github/workflows/test-integrations-web-2.yml +++ b/.github/workflows/test-integrations-web-2.yml @@ -29,7 +29,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.7","3.8","3.9","3.11","3.12","3.13"] + python-version: ["3.8","3.9","3.11","3.12","3.13"] os: [ubuntu-22.04] steps: - uses: actions/checkout@v4.2.2 @@ -90,7 +90,7 @@ jobs: coverage xml - name: Upload coverage to Codecov if: ${{ !cancelled() }} - uses: codecov/codecov-action@v5.1.2 + uses: codecov/codecov-action@v5.3.1 with: token: ${{ secrets.CODECOV_TOKEN }} files: coverage.xml @@ -111,7 +111,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.7","3.8","3.9","3.11","3.12","3.13"] + python-version: ["3.7","3.8","3.9","3.10","3.11","3.12","3.13"] os: [ubuntu-22.04] steps: - uses: actions/checkout@v4.2.2 @@ -172,7 +172,7 @@ jobs: coverage xml - name: Upload coverage to Codecov if: ${{ !cancelled() }} - uses: codecov/codecov-action@v5.1.2 + uses: codecov/codecov-action@v5.3.1 with: token: ${{ secrets.CODECOV_TOKEN }} files: coverage.xml diff --git a/scripts/populate_tox/tox.jinja b/scripts/populate_tox/tox.jinja index 15119b4768..fea23895f2 100644 --- a/scripts/populate_tox/tox.jinja +++ b/scripts/populate_tox/tox.jinja @@ -16,10 +16,10 @@ requires = virtualenv<20.26.3 envlist = # === Common === - {py3.6,py3.7,py3.8,py3.9,py3.10,py3.11,py3.12,py3.13}-common + {py3.7,py3.8,py3.9,py3.10,py3.11,py3.12,py3.13}-common # === Gevent === - {py3.6,py3.8,py3.10,py3.11,py3.12}-gevent + {py3.8,py3.10,py3.11,py3.12}-gevent # === Integrations === # General format is {pythonversion}-{integrationname}-v{frameworkversion} @@ -65,17 +65,17 @@ envlist = {py3.8,py3.11}-beam-latest # Boto3 - {py3.6,py3.7}-boto3-v{1.12} + {py3.7}-boto3-v{1.12} {py3.7,py3.11,py3.12}-boto3-v{1.23} {py3.11,py3.12}-boto3-v{1.34} {py3.11,py3.12,py3.13}-boto3-latest # Chalice - {py3.6,py3.9}-chalice-v{1.16} + {py3.7,py3.9}-chalice-v{1.16} {py3.8,py3.12,py3.13}-chalice-latest # Cloud Resource Context - {py3.6,py3.12,py3.13}-cloud_resource_context + {py3.7,py3.12,py3.13}-cloud_resource_context # Cohere {py3.9,py3.11,py3.12}-cohere-v5 @@ -83,13 +83,13 @@ envlist = # Django # - Django 1.x - {py3.6,py3.7}-django-v{1.11} + {py3.7}-django-v{1.11} # - Django 2.x - {py3.6,py3.7}-django-v{2.0} - {py3.6,py3.9}-django-v{2.2} + {py3.7}-django-v{2.0} + {py3.7,py3.9}-django-v{2.2} # - Django 3.x - {py3.6,py3.9}-django-v{3.0} - {py3.6,py3.9,py3.11}-django-v{3.2} + {py3.7,py3.9}-django-v{3.0} + {py3.7,py3.9,py3.11}-django-v{3.2} # - Django 4.x {py3.8,py3.11,py3.12}-django-v{4.0,4.1,4.2} # - Django 5.x @@ -104,8 +104,8 @@ envlist = {py3.7}-gcp # HTTPX - {py3.6,py3.9}-httpx-v{0.16,0.18} - {py3.6,py3.10}-httpx-v{0.20,0.22} + {py3.7,py3.9}-httpx-v{0.16,0.18} + {py3.7,py3.10}-httpx-v{0.20,0.22} {py3.7,py3.11,py3.12}-httpx-v{0.23,0.24} {py3.9,py3.11,py3.12}-httpx-v{0.25,0.27} {py3.9,py3.12,py3.13}-httpx-latest @@ -136,7 +136,7 @@ envlist = {py3.8,py3.9,py3.10,py3.11,py3.12,py3.13}-potel # pure_eval - {py3.6,py3.12,py3.13}-pure_eval + {py3.7,py3.12,py3.13}-pure_eval # Quart {py3.7,py3.11}-quart-v{0.16} @@ -148,24 +148,23 @@ envlist = {py3.10,py3.11}-ray-latest # Redis - {py3.6,py3.8}-redis-v{3} + {py3.7,py3.8}-redis-v{3} 
{py3.7,py3.8,py3.11}-redis-v{4} {py3.7,py3.11,py3.12}-redis-v{5} {py3.7,py3.12,py3.13}-redis-latest # Requests - {py3.6,py3.8,py3.12,py3.13}-requests + {py3.7,py3.8,py3.12,py3.13}-requests # RQ (Redis Queue) - {py3.6}-rq-v{0.6} - {py3.6,py3.9}-rq-v{0.13,1.0} - {py3.6,py3.11}-rq-v{1.5,1.10} + {py3.7,py3.9}-rq-v{0.13,1.0} + {py3.7,py3.11}-rq-v{1.5,1.10} {py3.7,py3.11,py3.12}-rq-v{1.15,1.16} {py3.7,py3.12,py3.13}-rq-latest # Sanic - {py3.6,py3.7}-sanic-v{0.8} - {py3.6,py3.8}-sanic-v{20} + {py3.7}-sanic-v{0.8} + {py3.8}-sanic-v{20} {py3.8,py3.11,py3.12}-sanic-v{24.6} {py3.9,py3.12,py3.13}-sanic-latest @@ -200,16 +199,16 @@ deps = # See https://github.com/pytest-dev/pytest/issues/9621 # and https://github.com/pytest-dev/pytest-forked/issues/67 # for justification of the upper bound on pytest - {py3.6,py3.7}-common: pytest<7.0.0 + py3.7-common: pytest<7.0.0 {py3.8,py3.9,py3.10,py3.11,py3.12,py3.13}-common: pytest # === Gevent === - {py3.6,py3.7,py3.8,py3.9,py3.10,py3.11}-gevent: gevent>=22.10.0, <22.11.0 + {py3.7,py3.8,py3.9,py3.10,py3.11}-gevent: gevent>=22.10.0, <22.11.0 {py3.12}-gevent: gevent # See https://github.com/pytest-dev/pytest/issues/9621 # and https://github.com/pytest-dev/pytest-forked/issues/67 # for justification of the upper bound on pytest - {py3.6,py3.7}-gevent: pytest<7.0.0 + py3.7-gevent: pytest<7.0.0 {py3.8,py3.9,py3.10,py3.11,py3.12}-gevent: pytest # === Integrations === @@ -397,7 +396,7 @@ deps = # Redis redis: fakeredis!=1.7.4 redis: pytest<8.0.0 - {py3.6,py3.7}-redis: fakeredis!=2.26.0 # https://github.com/cunla/fakeredis-py/issues/341 + py3.7-redis: fakeredis!=2.26.0 # https://github.com/cunla/fakeredis-py/issues/341 {py3.7,py3.8,py3.9,py3.10,py3.11,py3.12,py3.13}-redis: pytest-asyncio redis-v3: redis~=3.0 redis-v4: redis~=4.0 @@ -409,13 +408,11 @@ deps = # RQ (Redis Queue) # https://github.com/jamesls/fakeredis/issues/245 - rq-v{0.6}: fakeredis<1.0 - rq-v{0.6}: redis<3.2.2 rq-v{0.13,1.0,1.5,1.10}: fakeredis>=1.0,<1.7.4 rq-v{1.15,1.16}: fakeredis - {py3.6,py3.7}-rq-v{1.15,1.16}: fakeredis!=2.26.0 # https://github.com/cunla/fakeredis-py/issues/341 + py3.7-rq-v{1.15,1.16}: fakeredis!=2.26.0 # https://github.com/cunla/fakeredis-py/issues/341 rq-latest: fakeredis - {py3.6,py3.7}-rq-latest: fakeredis!=2.26.0 # https://github.com/cunla/fakeredis-py/issues/341 + py3.7-rq-latest: fakeredis!=2.26.0 # https://github.com/cunla/fakeredis-py/issues/341 rq-v0.6: rq~=0.6.0 rq-v0.13: rq~=0.13.0 rq-v1.0: rq~=1.0.0 @@ -430,7 +427,6 @@ deps = sanic: aiohttp sanic-v{24.6}: sanic_testing sanic-latest: sanic_testing - {py3.6}-sanic: aiocontextvars==0.2.1 sanic-v0.8: sanic~=0.8.0 sanic-v20: sanic~=20.0 sanic-v24.6: sanic~=24.6.0 @@ -462,9 +458,9 @@ setenv = PYTHONDONTWRITEBYTECODE=1 OBJC_DISABLE_INITIALIZE_FORK_SAFETY=YES COVERAGE_FILE=.coverage-sentry-{envname} - py3.6: COVERAGE_RCFILE=.coveragerc36 django: DJANGO_SETTINGS_MODULE=tests.integrations.django.myapp.settings + py3.12-django: PIP_CONSTRAINT=constraints.txt common: TESTPATH=tests gevent: TESTPATH=tests @@ -542,7 +538,6 @@ extras = pymongo: pymongo basepython = - py3.6: python3.6 py3.7: python3.7 py3.8: python3.8 py3.9: python3.9 diff --git a/tox.ini b/tox.ini index 72e8e055cd..ebc6df8227 100644 --- a/tox.ini +++ b/tox.ini @@ -2,6 +2,13 @@ # in multiple virtualenvs. This configuration file will run the # test suite on all supported python versions. To use it, "pip install tox" # and then run "tox" from this directory. +# +# This file has been generated from a template +# by "scripts/populate_tox/populate_tox.py". 
Any changes to the file should +# be made in the template (if you want to change a hardcoded part of the file) +# or in the script (if you want to change the auto-generated part). +# The file (and all resulting CI YAMLs) then need to be regenerated via +# "scripts/generate-test-files.sh". [tox] requires = @@ -36,10 +43,6 @@ envlist = {py3.8,py3.11,py3.12}-anthropic-v{0.16,0.28,0.40} {py3.7,py3.11,py3.12}-anthropic-latest - # Ariadne - {py3.8,py3.11}-ariadne-v{0.20} - {py3.8,py3.12,py3.13}-ariadne-latest - # Arq {py3.7,py3.11}-arq-v{0.23} {py3.7,py3.12,py3.13}-arq-latest @@ -67,25 +70,10 @@ envlist = {py3.11,py3.12}-boto3-v{1.34} {py3.11,py3.12,py3.13}-boto3-latest - # Bottle - {py3.7,py3.9}-bottle-v{0.12} - {py3.7,py3.12,py3.13}-bottle-latest - - # Celery - {py3.8}-celery-v{4} - {py3.8}-celery-v{5.0} - {py3.8,py3.10}-celery-v{5.1,5.2} - {py3.8,py3.11,py3.12}-celery-v{5.3,5.4,5.5} - {py3.8,py3.12,py3.13}-celery-latest - # Chalice {py3.7,py3.9}-chalice-v{1.16} {py3.8,py3.12,py3.13}-chalice-latest - # Clickhouse Driver - {py3.8,py3.11}-clickhouse_driver-v{0.2.0} - {py3.8,py3.12,py3.13}-clickhouse_driver-latest - # Cloud Resource Context {py3.7,py3.12,py3.13}-cloud_resource_context @@ -108,45 +96,13 @@ envlist = {py3.10,py3.11,py3.12}-django-v{5.0,5.1} {py3.10,py3.12,py3.13}-django-latest - # dramatiq - {py3.7,py3.9}-dramatiq-v{1.13} - {py3.7,py3.10,py3.11}-dramatiq-v{1.15} - {py3.8,py3.11,py3.12}-dramatiq-v{1.17} - {py3.8,py3.11,py3.12}-dramatiq-latest - - # Falcon - {py3.7}-falcon-v{1,1.4,2} - {py3.7,py3.11,py3.12}-falcon-v{3} - {py3.8,py3.11,py3.12}-falcon-v{4} - {py3.7,py3.11,py3.12}-falcon-latest - # FastAPI {py3.7,py3.10}-fastapi-v{0.79} {py3.8,py3.12,py3.13}-fastapi-latest - # Flask - {py3.7,py3.8}-flask-v{1} - {py3.8,py3.11,py3.12}-flask-v{2} - {py3.10,py3.11,py3.12}-flask-v{3} - {py3.10,py3.12,py3.13}-flask-latest - # GCP {py3.7}-gcp - # GQL - {py3.7,py3.11}-gql-v{3.4} - {py3.7,py3.12,py3.13}-gql-latest - - # Graphene - {py3.7,py3.11}-graphene-v{3.3} - {py3.7,py3.12,py3.13}-graphene-latest - - # gRPC - {py3.7,py3.9}-grpc-v{1.39} - {py3.7,py3.10}-grpc-v{1.49} - {py3.7,py3.11}-grpc-v{1.59} - {py3.8,py3.11,py3.12}-grpc-latest - # HTTPX {py3.7,py3.9}-httpx-v{0.16,0.18} {py3.7,py3.10}-httpx-v{0.20,0.22} @@ -154,34 +110,18 @@ envlist = {py3.9,py3.11,py3.12}-httpx-v{0.25,0.27} {py3.9,py3.12,py3.13}-httpx-latest - # Huey - {py3.7,py3.11,py3.12}-huey-v{2.0} - {py3.7,py3.12,py3.13}-huey-latest - - # Huggingface Hub - {py3.9,py3.12,py3.13}-huggingface_hub-{v0.22} - {py3.9,py3.12,py3.13}-huggingface_hub-latest - # Langchain {py3.9,py3.11,py3.12}-langchain-v0.1 {py3.9,py3.11,py3.12}-langchain-v0.3 {py3.9,py3.11,py3.12}-langchain-latest {py3.9,py3.11,py3.12}-langchain-notiktoken - # LaunchDarkly - {py3.8,py3.12,py3.13}-launchdarkly-v9.8.0 - {py3.8,py3.12,py3.13}-launchdarkly-latest - # Litestar {py3.8,py3.11}-litestar-v{2.0} {py3.8,py3.11,py3.12}-litestar-v{2.6} {py3.8,py3.11,py3.12}-litestar-v{2.12} {py3.8,py3.11,py3.12}-litestar-latest - # Loguru - {py3.7,py3.11,py3.12}-loguru-v{0.5} - {py3.7,py3.12,py3.13}-loguru-latest - # OpenAI {py3.9,py3.11,py3.12}-openai-v1.0 {py3.9,py3.11,py3.12}-openai-v1.22 @@ -189,10 +129,6 @@ envlist = {py3.9,py3.11,py3.12}-openai-latest {py3.9,py3.11,py3.12}-openai-notiktoken - # OpenFeature - {py3.8,py3.12,py3.13}-openfeature-v0.7 - {py3.8,py3.12,py3.13}-openfeature-latest - # OpenTelemetry (OTel) {py3.7,py3.9,py3.12,py3.13}-opentelemetry @@ -202,19 +138,6 @@ envlist = # pure_eval {py3.7,py3.12,py3.13}-pure_eval - # PyMongo (Mongo DB) - {py3.7}-pymongo-v{3.7} - 
{py3.7,py3.9}-pymongo-v{3.12} - {py3.7,py3.11}-pymongo-v{4.0} - {py3.7,py3.11,py3.12}-pymongo-v{4.3,4.7} - {py3.7,py3.12,py3.13}-pymongo-latest - - # Pyramid - {py3.7,py3.11}-pyramid-v{1.6} - {py3.7,py3.11,py3.12}-pyramid-v{1.10} - {py3.7,py3.11,py3.12}-pyramid-v{2.0} - {py3.7,py3.11,py3.12}-pyramid-latest - # Quart {py3.7,py3.11}-quart-v{0.16} {py3.8,py3.11,py3.12}-quart-v{0.19} @@ -230,10 +153,6 @@ envlist = {py3.7,py3.11,py3.12}-redis-v{5} {py3.7,py3.12,py3.13}-redis-latest - # Redis Cluster - {py3.7,py3.8}-redis_py_cluster_legacy-v{1,2} - # no -latest, not developed anymore - # Requests {py3.7,py3.8,py3.12,py3.13}-requests @@ -249,48 +168,128 @@ envlist = {py3.8,py3.11,py3.12}-sanic-v{24.6} {py3.9,py3.12,py3.13}-sanic-latest - # Spark - {py3.8,py3.10,py3.11}-spark-v{3.1,3.3,3.5} - {py3.8,py3.10,py3.11,py3.12}-spark-latest - - # Starlette - {py3.7,py3.10}-starlette-v{0.19} - {py3.7,py3.11}-starlette-v{0.24,0.28} - {py3.8,py3.11,py3.12}-starlette-v{0.32,0.36,0.40} - {py3.8,py3.12,py3.13}-starlette-latest - - # Starlite - {py3.8,py3.11}-starlite-v{1.48,1.51} - # 1.51.14 is the last starlite version; the project continues as litestar - - # SQL Alchemy - {py3.7,py3.9}-sqlalchemy-v{1.2,1.4} - {py3.7,py3.11}-sqlalchemy-v{2.0} - {py3.7,py3.12,py3.13}-sqlalchemy-latest - - # Strawberry - {py3.8,py3.11}-strawberry-v{0.209} - {py3.8,py3.11,py3.12}-strawberry-v{0.222} - {py3.8,py3.12,py3.13}-strawberry-latest - - # Tornado - {py3.8,py3.11,py3.12}-tornado-v{6.0} - {py3.8,py3.11,py3.12}-tornado-v{6.2} - {py3.8,py3.11,py3.12}-tornado-latest - - # Trytond - {py3.7,py3.8}-trytond-v{5} - {py3.7,py3.11}-trytond-v{6} - {py3.8,py3.11,py3.12}-trytond-v{7} - {py3.8,py3.12,py3.13}-trytond-latest - - # Typer - {py3.7,py3.12,py3.13}-typer-v{0.15} - {py3.7,py3.12,py3.13}-typer-latest - - # Unleash + # === Integrations - Auto-generated === + # These come from the populate_tox.py script. Eventually we should move all + # integration tests there. 
+ + # ~~~ DBs ~~~ + {py3.7,py3.11,py3.12}-clickhouse_driver-v0.2.9 + + {py3.7}-pymongo-v3.7.2 + {py3.7,py3.10,py3.11}-pymongo-v3.13.0 + {py3.7,py3.9,py3.10}-pymongo-v4.0.2 + {py3.9,py3.12,py3.13}-pymongo-v4.11.1 + + {py3.7}-redis_py_cluster_legacy-v2.0.0 + {py3.7,py3.8}-redis_py_cluster_legacy-v2.1.3 + + {py3.7}-sqlalchemy-v1.3.9 + {py3.7,py3.11,py3.12}-sqlalchemy-v1.4.54 + {py3.7,py3.10,py3.11}-sqlalchemy-v2.0.9 + {py3.7,py3.12,py3.13}-sqlalchemy-v2.0.38 + + + # ~~~ Flags ~~~ + {py3.8,py3.12,py3.13}-launchdarkly-v9.8.1 + {py3.8,py3.12,py3.13}-launchdarkly-v9.9.0 + + {py3.8,py3.12,py3.13}-openfeature-v0.7.5 + {py3.9,py3.12,py3.13}-openfeature-v0.8.0 + + {py3.7,py3.12,py3.13}-statsig-v0.55.3 + {py3.7,py3.12,py3.13}-statsig-v0.56.0 + {py3.8,py3.12,py3.13}-unleash-v6.0.1 - {py3.8,py3.12,py3.13}-unleash-latest + {py3.8,py3.12,py3.13}-unleash-v6.1.0 + + + # ~~~ GraphQL ~~~ + {py3.8,py3.10,py3.11}-ariadne-v0.20.1 + {py3.8,py3.11,py3.12}-ariadne-v0.22 + {py3.8,py3.11,py3.12}-ariadne-v0.24.0 + {py3.8,py3.11,py3.12}-ariadne-v0.25.2 + + {py3.7,py3.9,py3.10}-gql-v3.4.1 + {py3.7,py3.11,py3.12}-gql-v3.5.0 + + {py3.7,py3.9,py3.10}-graphene-v3.3 + {py3.8,py3.12,py3.13}-graphene-v3.4.3 + + {py3.8,py3.10,py3.11}-strawberry-v0.209.8 + {py3.8,py3.11,py3.12}-strawberry-v0.226.2 + {py3.8,py3.11,py3.12}-strawberry-v0.243.1 + {py3.9,py3.12,py3.13}-strawberry-v0.260.2 + + + # ~~~ Network ~~~ + {py3.7,py3.8}-grpc-v1.32.0 + {py3.7,py3.9,py3.10}-grpc-v1.44.0 + {py3.7,py3.10,py3.11}-grpc-v1.58.3 + {py3.8,py3.12,py3.13}-grpc-v1.70.0 + + + # ~~~ Tasks ~~~ + {py3.7,py3.8}-celery-v4.4.7 + {py3.7,py3.8}-celery-v5.0.5 + {py3.8,py3.11,py3.12}-celery-v5.4.0 + + {py3.7}-dramatiq-v1.9.0 + {py3.7,py3.8,py3.9}-dramatiq-v1.12.3 + {py3.7,py3.10,py3.11}-dramatiq-v1.15.0 + {py3.8,py3.12,py3.13}-dramatiq-v1.17.1 + + {py3.8,py3.9}-spark-v3.0.3 + {py3.8,py3.9}-spark-v3.2.4 + {py3.8,py3.10,py3.11}-spark-v3.4.4 + {py3.8,py3.10,py3.11}-spark-v3.5.4 + + + # ~~~ Web 1 ~~~ + {py3.7,py3.8}-flask-v1.1.4 + {py3.8,py3.12,py3.13}-flask-v2.3.3 + {py3.8,py3.12,py3.13}-flask-v3.0.3 + {py3.9,py3.12,py3.13}-flask-v3.1.0 + + {py3.7,py3.9,py3.10}-starlette-v0.16.0 + {py3.7,py3.10,py3.11}-starlette-v0.26.1 + {py3.8,py3.11,py3.12}-starlette-v0.36.3 + {py3.9,py3.12,py3.13}-starlette-v0.45.3 + + + # ~~~ Web 2 ~~~ + {py3.7}-bottle-v0.12.25 + {py3.7,py3.8,py3.9}-bottle-v0.13.2 + + {py3.7}-falcon-v2.0.0 + {py3.7,py3.11,py3.12}-falcon-v3.1.3 + {py3.8,py3.11,py3.12}-falcon-v4.0.2 + + {py3.7,py3.8,py3.9}-pyramid-v1.10.8 + {py3.7,py3.10,py3.11}-pyramid-v2.0.2 + + {py3.8,py3.10,py3.11}-starlite-v1.48.1 + {py3.8,py3.10,py3.11}-starlite-v1.49.0 + {py3.8,py3.10,py3.11}-starlite-v1.50.2 + {py3.8,py3.10,py3.11}-starlite-v1.51.16 + + {py3.7,py3.8}-tornado-v6.0.4 + {py3.7,py3.8,py3.9}-tornado-v6.1 + {py3.7,py3.9,py3.10}-tornado-v6.2 + {py3.8,py3.10,py3.11}-tornado-v6.4.2 + + + # ~~~ Misc ~~~ + {py3.7,py3.12,py3.13}-loguru-v0.7.3 + + {py3.7}-trytond-v5.0.9 + {py3.7,py3.8}-trytond-v5.8.16 + {py3.8,py3.10,py3.11}-trytond-v6.8.17 + {py3.8,py3.11,py3.12}-trytond-v7.0.9 + {py3.8,py3.11,py3.12}-trytond-v7.4.6 + + {py3.7,py3.11,py3.12}-typer-v0.15.1 + + [testenv] deps = @@ -338,13 +337,6 @@ deps = anthropic-v0.40: anthropic~=0.40.0 anthropic-latest: anthropic - # Ariadne - ariadne-v0.20: ariadne~=0.20.0 - ariadne-latest: ariadne - ariadne: fastapi - ariadne: flask - ariadne: httpx - # Arq arq-v0.23: arq~=0.23.0 arq-v0.23: pydantic<2 @@ -375,40 +367,18 @@ deps = boto3-v1.34: boto3~=1.34.0 boto3-latest: boto3 - # Bottle - bottle: Werkzeug<2.1.0 - bottle-v0.12: bottle~=0.12.0 - 
bottle-latest: bottle - - # Celery - celery: redis - celery: newrelic - celery-v4: Celery~=4.0 - celery-v5.0: Celery~=5.0.0 - celery-v5.1: Celery~=5.1.0 - celery-v5.2: Celery~=5.2.0 - celery-v5.3: Celery~=5.3.0 - celery-v5.4: Celery~=5.4.0 - # TODO: update when stable is out - celery-v5.5: Celery==5.5.0rc4 - celery-latest: Celery - # Chalice chalice: pytest-chalice==0.0.5 chalice-v1.16: chalice~=1.16.0 chalice-latest: chalice - # Clickhouse Driver - clickhouse_driver-v0.2.0: clickhouse_driver~=0.2.0 - clickhouse_driver-latest: clickhouse_driver - # Cohere cohere-v5: cohere~=5.3.3 cohere-latest: cohere # Django django: psycopg2-binary - django-v{1.11,2.0,2.2,3.0,3.1,3.2}: djangorestframework>=3.0.0,<4.0.0 + django-v{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: djangorestframework>=3.0.0,<4.0.0 django-v{2.0,2.2,3.0,3.2,4.0,4.1,4.2,5.0,5.1}: channels[daphne] django-v{2.2,3.0}: six django-v{1.11,2.0,2.2,3.0,3.2}: Werkzeug<2.1.0 @@ -435,20 +405,6 @@ deps = django-v5.1: Django==5.1rc1 django-latest: Django - # dramatiq - dramatiq-v1.13: dramatiq>=1.13,<1.14 - dramatiq-v1.15: dramatiq>=1.15,<1.16 - dramatiq-v1.17: dramatiq>=1.17,<1.18 - dramatiq-latest: dramatiq - - # Falcon - falcon-v1.4: falcon~=1.4.0 - falcon-v1: falcon~=1.0 - falcon-v2: falcon~=2.0 - falcon-v3: falcon~=3.0 - falcon-v4: falcon~=4.0 - falcon-latest: falcon - # FastAPI fastapi: httpx # (this is a dependency of httpx) @@ -459,38 +415,6 @@ deps = fastapi-v{0.79}: fastapi~=0.79.0 fastapi-latest: fastapi - # Flask - flask: flask-login - flask-v{1,2.0}: Werkzeug<2.1.0 - flask-v{1,2.0}: markupsafe<2.1.0 - flask-v{3}: Werkzeug - flask-v1: Flask~=1.0 - flask-v2: Flask~=2.0 - flask-v3: Flask~=3.0 - flask-latest: Flask - - # GQL - gql-v{3.4}: gql[all]~=3.4.0 - gql-latest: gql[all] - - # Graphene - graphene: blinker - graphene: fastapi - graphene: flask - graphene: httpx - graphene-v{3.3}: graphene~=3.3.0 - graphene-latest: graphene - - # gRPC - grpc: protobuf - grpc: mypy-protobuf - grpc: types-protobuf - grpc: pytest-asyncio - grpc-v1.39: grpcio~=1.39.0 - grpc-v1.49: grpcio~=1.49.1 - grpc-v1.59: grpcio~=1.59.0 - grpc-latest: grpcio - # HTTPX httpx-v0.16: pytest-httpx==0.10.0 httpx-v0.18: pytest-httpx==0.12.0 @@ -512,14 +436,6 @@ deps = httpx-v0.27: httpx~=0.27.0 httpx-latest: httpx - # Huey - huey-v2.0: huey~=2.0.0 - huey-latest: huey - - # Huggingface Hub - huggingface_hub-v0.22: huggingface_hub~=0.22.2 - huggingface_hub-latest: huggingface_hub - # Langchain langchain-v0.1: openai~=1.0.0 langchain-v0.1: langchain~=0.1.11 @@ -545,10 +461,6 @@ deps = litestar-v2.12: litestar~=2.12.0 litestar-latest: litestar - # Loguru - loguru-v0.5: loguru~=0.5.0 - loguru-latest: loguru - # OpenAI openai: pytest-asyncio openai-v1.0: openai~=1.0.0 @@ -563,18 +475,6 @@ deps = openai-latest: tiktoken~=0.6.0 openai-notiktoken: openai - # OpenFeature - openfeature-v0.7: openfeature-sdk~=0.7.1 - openfeature-latest: openfeature-sdk - - # LaunchDarkly - launchdarkly-v9.8.0: launchdarkly-server-sdk~=9.8.0 - launchdarkly-latest: launchdarkly-server-sdk - - # Unleash - unleash-v6.0.1: UnleashClient~=6.0.1 - unleash-latest: UnleashClient - # OpenTelemetry (OTel) opentelemetry: opentelemetry-distro @@ -584,22 +484,6 @@ deps = # pure_eval pure_eval: pure_eval - # PyMongo (MongoDB) - pymongo: mockupdb - pymongo-v3.1: pymongo~=3.1.0 - pymongo-v3.13: pymongo~=3.13.0 - pymongo-v4.0: pymongo~=4.0.0 - pymongo-v4.3: pymongo~=4.3.0 - pymongo-v4.7: pymongo~=4.7.0 - pymongo-latest: pymongo - - # Pyramid - pyramid: Werkzeug<2.1.0 - pyramid-v1.6: pyramid~=1.6.0 - pyramid-v1.10: pyramid~=1.10.0 
- pyramid-v2.0: pyramid~=2.0.0 - pyramid-latest: pyramid - # Quart quart: quart-auth quart: pytest-asyncio @@ -620,26 +504,23 @@ deps = # Redis redis: fakeredis!=1.7.4 redis: pytest<8.0.0 - {py3.6,py3.7}-redis: fakeredis!=2.26.0 # https://github.com/cunla/fakeredis-py/issues/341 + py3.7-redis: fakeredis!=2.26.0 # https://github.com/cunla/fakeredis-py/issues/341 {py3.7,py3.8,py3.9,py3.10,py3.11,py3.12,py3.13}-redis: pytest-asyncio redis-v3: redis~=3.0 redis-v4: redis~=4.0 redis-v5: redis~=5.0 redis-latest: redis - # Redis Cluster - redis_py_cluster_legacy-v1: redis-py-cluster~=1.0 - redis_py_cluster_legacy-v2: redis-py-cluster~=2.0 - # Requests requests: requests>=2.0 # RQ (Redis Queue) + # https://github.com/jamesls/fakeredis/issues/245 rq-v{0.13,1.0,1.5,1.10}: fakeredis>=1.0,<1.7.4 rq-v{1.15,1.16}: fakeredis py3.7-rq-v{1.15,1.16}: fakeredis!=2.26.0 # https://github.com/cunla/fakeredis-py/issues/341 - py3.7-rq-latest: fakeredis!=2.26.0 # https://github.com/cunla/fakeredis-py/issues/341 rq-latest: fakeredis + py3.7-rq-latest: fakeredis!=2.26.0 # https://github.com/cunla/fakeredis-py/issues/341 rq-v0.6: rq~=0.6.0 rq-v0.13: rq~=0.13.0 rq-v1.0: rq~=1.0.0 @@ -659,74 +540,174 @@ deps = sanic-v24.6: sanic~=24.6.0 sanic-latest: sanic - # Spark - spark-v3.1: pyspark~=3.1.0 - spark-v3.3: pyspark~=3.3.0 - spark-v3.5: pyspark~=3.5.0 - # TODO: update to ~=4.0.0 once stable is out - spark-v4.0: pyspark==4.0.0.dev2 - spark-latest: pyspark + # === Integrations - Auto-generated === + # These come from the populate_tox.py script. Eventually we should move all + # integration tests there. + + # ~~~ DBs ~~~ + clickhouse_driver-v0.2.9: clickhouse-driver==0.2.9 + + pymongo-v3.7.2: pymongo==3.7.2 + pymongo-v3.13.0: pymongo==3.13.0 + pymongo-v4.0.2: pymongo==4.0.2 + pymongo-v4.11.1: pymongo==4.11.1 + pymongo: mockupdb + + redis_py_cluster_legacy-v2.0.0: redis-py-cluster==2.0.0 + redis_py_cluster_legacy-v2.1.3: redis-py-cluster==2.1.3 - # Starlette + sqlalchemy-v1.3.9: sqlalchemy==1.3.9 + sqlalchemy-v1.4.54: sqlalchemy==1.4.54 + sqlalchemy-v2.0.9: sqlalchemy==2.0.9 + sqlalchemy-v2.0.38: sqlalchemy==2.0.38 + + + # ~~~ Flags ~~~ + launchdarkly-v9.8.1: launchdarkly-server-sdk==9.8.1 + launchdarkly-v9.9.0: launchdarkly-server-sdk==9.9.0 + + openfeature-v0.7.5: openfeature-sdk==0.7.5 + openfeature-v0.8.0: openfeature-sdk==0.8.0 + + statsig-v0.55.3: statsig==0.55.3 + statsig-v0.56.0: statsig==0.56.0 + statsig: typing_extensions + + unleash-v6.0.1: UnleashClient==6.0.1 + unleash-v6.1.0: UnleashClient==6.1.0 + + + # ~~~ GraphQL ~~~ + ariadne-v0.20.1: ariadne==0.20.1 + ariadne-v0.22: ariadne==0.22 + ariadne-v0.24.0: ariadne==0.24.0 + ariadne-v0.25.2: ariadne==0.25.2 + ariadne: fastapi + ariadne: flask + ariadne: httpx + + gql-v3.4.1: gql[all]==3.4.1 + gql-v3.5.0: gql[all]==3.5.0 + + graphene-v3.3: graphene==3.3 + graphene-v3.4.3: graphene==3.4.3 + graphene: blinker + graphene: fastapi + graphene: flask + graphene: httpx + py3.6-graphene: aiocontextvars + + strawberry-v0.209.8: strawberry-graphql[fastapi,flask]==0.209.8 + strawberry-v0.226.2: strawberry-graphql[fastapi,flask]==0.226.2 + strawberry-v0.243.1: strawberry-graphql[fastapi,flask]==0.243.1 + strawberry-v0.260.2: strawberry-graphql[fastapi,flask]==0.260.2 + strawberry: httpx + + + # ~~~ Network ~~~ + grpc-v1.32.0: grpcio==1.32.0 + grpc-v1.44.0: grpcio==1.44.0 + grpc-v1.58.3: grpcio==1.58.3 + grpc-v1.70.0: grpcio==1.70.0 + grpc: protobuf + grpc: mypy-protobuf + grpc: types-protobuf + grpc: pytest-asyncio + + + # ~~~ Tasks ~~~ + celery-v4.4.7: celery==4.4.7 + 
celery-v5.0.5: celery==5.0.5 + celery-v5.4.0: celery==5.4.0 + celery: newrelic + celery: redis + py3.7-celery: importlib-metadata<5.0 + + dramatiq-v1.9.0: dramatiq==1.9.0 + dramatiq-v1.12.3: dramatiq==1.12.3 + dramatiq-v1.15.0: dramatiq==1.15.0 + dramatiq-v1.17.1: dramatiq==1.17.1 + + spark-v3.0.3: pyspark==3.0.3 + spark-v3.2.4: pyspark==3.2.4 + spark-v3.4.4: pyspark==3.4.4 + spark-v3.5.4: pyspark==3.5.4 + + + # ~~~ Web 1 ~~~ + flask-v1.1.4: flask==1.1.4 + flask-v2.3.3: flask==2.3.3 + flask-v3.0.3: flask==3.0.3 + flask-v3.1.0: flask==3.1.0 + flask: flask-login + flask: werkzeug + flask-v1.1.4: werkzeug<2.1.0 + flask-v1.1.4: markupsafe<2.1.0 + + starlette-v0.16.0: starlette==0.16.0 + starlette-v0.26.1: starlette==0.26.1 + starlette-v0.36.3: starlette==0.36.3 + starlette-v0.45.3: starlette==0.45.3 starlette: pytest-asyncio starlette: python-multipart starlette: requests - # (this is a dependency of httpx) starlette: anyio<4.0.0 starlette: jinja2 - starlette-v{0.19,0.24,0.28,0.32,0.36}: httpx<0.28.0 - starlette-v0.40: httpx - starlette-latest: httpx - starlette-v0.19: starlette~=0.19.0 - starlette-v0.24: starlette~=0.24.0 - starlette-v0.28: starlette~=0.28.0 - starlette-v0.32: starlette~=0.32.0 - starlette-v0.36: starlette~=0.36.0 - starlette-v0.40: starlette~=0.40.0 - starlette-latest: starlette - - # Starlite + starlette: httpx + starlette-v0.16.0: httpx<0.28.0 + starlette-v0.26.1: httpx<0.28.0 + starlette-v0.36.3: httpx<0.28.0 + py3.6-starlette: aiocontextvars + + + # ~~~ Web 2 ~~~ + bottle-v0.12.25: bottle==0.12.25 + bottle-v0.13.2: bottle==0.13.2 + bottle: werkzeug<2.1.0 + + falcon-v2.0.0: falcon==2.0.0 + falcon-v3.1.3: falcon==3.1.3 + falcon-v4.0.2: falcon==4.0.2 + + pyramid-v1.10.8: pyramid==1.10.8 + pyramid-v2.0.2: pyramid==2.0.2 + pyramid: werkzeug<2.1.0 + + starlite-v1.48.1: starlite==1.48.1 + starlite-v1.49.0: starlite==1.49.0 + starlite-v1.50.2: starlite==1.50.2 + starlite-v1.51.16: starlite==1.51.16 starlite: pytest-asyncio starlite: python-multipart starlite: requests starlite: cryptography starlite: pydantic<2.0.0 starlite: httpx<0.28 - starlite-v{1.48}: starlite~=1.48.0 - starlite-v{1.51}: starlite~=1.51.0 - - # SQLAlchemy - sqlalchemy-v1.2: sqlalchemy~=1.2.0 - sqlalchemy-v1.4: sqlalchemy~=1.4.0 - sqlalchemy-v2.0: sqlalchemy~=2.0.0 - sqlalchemy-latest: sqlalchemy - - # Strawberry - strawberry: fastapi - strawberry: flask - strawberry: httpx - strawberry-v0.209: strawberry-graphql[fastapi,flask]~=0.209.0 - strawberry-v0.222: strawberry-graphql[fastapi,flask]~=0.222.0 - strawberry-latest: strawberry-graphql[fastapi,flask] - - # Tornado - # Tornado <6.4.1 is incompatible with Pytest ≥8.2 - # See https://github.com/tornadoweb/tornado/pull/3382. 
- tornado-{v6.0,v6.2}: pytest<8.2 - tornado-v6.0: tornado~=6.0.0 - tornado-v6.2: tornado~=6.2.0 - tornado-latest: tornado - - # Trytond + + tornado-v6.0.4: tornado==6.0.4 + tornado-v6.1: tornado==6.1 + tornado-v6.2: tornado==6.2 + tornado-v6.4.2: tornado==6.4.2 + tornado: pytest + tornado-v6.0.4: pytest<8.2 + tornado-v6.1: pytest<8.2 + tornado-v6.2: pytest<8.2 + py3.6-tornado: aiocontextvars + + + # ~~~ Misc ~~~ + loguru-v0.7.3: loguru==0.7.3 + + trytond-v5.0.9: trytond==5.0.9 + trytond-v5.8.16: trytond==5.8.16 + trytond-v6.8.17: trytond==6.8.17 + trytond-v7.0.9: trytond==7.0.9 + trytond-v7.4.6: trytond==7.4.6 trytond: werkzeug - trytond-v5: trytond~=5.0 - trytond-v6: trytond~=6.0 - trytond-v7: trytond~=7.0 - trytond-latest: trytond - # Typer - typer-v0.15: typer~=0.15.0 - typer-latest: typer + typer-v0.15.1: typer==0.15.1 + + setenv = PYTHONDONTWRITEBYTECODE=1 @@ -784,9 +765,10 @@ setenv = rq: TESTPATH=tests/integrations/rq sanic: TESTPATH=tests/integrations/sanic spark: TESTPATH=tests/integrations/spark + sqlalchemy: TESTPATH=tests/integrations/sqlalchemy starlette: TESTPATH=tests/integrations/starlette starlite: TESTPATH=tests/integrations/starlite - sqlalchemy: TESTPATH=tests/integrations/sqlalchemy + statsig: TESTPATH=tests/integrations/statsig strawberry: TESTPATH=tests/integrations/strawberry tornado: TESTPATH=tests/integrations/tornado trytond: TESTPATH=tests/integrations/trytond From bb324323adcc9366aba97d83c77d51fd9c282aa6 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Mon, 17 Feb 2025 15:25:57 +0100 Subject: [PATCH 179/244] Fix mypy --- sentry_sdk/client.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/sentry_sdk/client.py b/sentry_sdk/client.py index 949c6fa7ee..c905785b8a 100644 --- a/sentry_sdk/client.py +++ b/sentry_sdk/client.py @@ -574,7 +574,9 @@ def _prepare_event( quantity=spans_before + 1, # +1 for the transaction itself ) else: - spans_delta = spans_before - len(new_event.get("spans", [])) + spans_delta = spans_before - len( + cast(List[Dict[str, object]], new_event.get("spans", [])) + ) if spans_delta > 0 and self.transport is not None: self.transport.record_lost_event( reason="before_send", data_category="span", quantity=spans_delta From 1fc4f85dce3d6204ede0554a614f6230c7e21e7a Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Mon, 24 Feb 2025 10:42:59 +0100 Subject: [PATCH 180/244] fix lint --- .../integrations/anthropic/test_anthropic.py | 20 +++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/tests/integrations/anthropic/test_anthropic.py b/tests/integrations/anthropic/test_anthropic.py index 253f1d4f33..c318331972 100644 --- a/tests/integrations/anthropic/test_anthropic.py +++ b/tests/integrations/anthropic/test_anthropic.py @@ -528,9 +528,9 @@ def test_streaming_create_message_with_input_json_delta( assert span["data"][SPANDATA.AI_INPUT_MESSAGES] == _serialize_span_attribute( messages ) - assert span["data"][SPANDATA.AI_RESPONSES] == _serialize_span_attribute([ - {"type": "text", "text": "{'location': 'San Francisco, CA'}"} - ]) # we do not record InputJSONDelta because it could contain PII + assert span["data"][SPANDATA.AI_RESPONSES] == _serialize_span_attribute( + [{"type": "text", "text": "{'location': 'San Francisco, CA'}"}] + ) # we do not record InputJSONDelta because it could contain PII else: assert SPANDATA.AI_INPUT_MESSAGES not in span["data"] @@ -667,9 +667,9 @@ async def test_streaming_create_message_with_input_json_delta_async( assert span["data"][SPANDATA.AI_INPUT_MESSAGES] == 
_serialize_span_attribute( messages ) - assert span["data"][SPANDATA.AI_RESPONSES] == _serialize_span_attribute([ - {"type": "text", "text": "{'location': 'San Francisco, CA'}"} - ]) # we do not record InputJSONDelta because it could contain PII + assert span["data"][SPANDATA.AI_RESPONSES] == _serialize_span_attribute( + [{"type": "text", "text": "{'location': 'San Francisco, CA'}"}] + ) # we do not record InputJSONDelta because it could contain PII else: assert SPANDATA.AI_INPUT_MESSAGES not in span["data"] @@ -832,10 +832,10 @@ def test_add_ai_data_to_span_with_input_json_delta(sentry_init, capture_events): assert len(event["spans"]) == 1 (span,) = event["spans"] - assert span["data"][SPANDATA.AI_RESPONSES] == _serialize_span_attribute([ - {"type": "text", "text": "{'test': 'data','more': 'json'}"} - ]) - assert span["data"]["ai.streaming"] == True + assert span["data"][SPANDATA.AI_RESPONSES] == _serialize_span_attribute( + [{"type": "text", "text": "{'test': 'data','more': 'json'}"}] + ) + assert span["data"]["ai.streaming"] is True assert span["measurements"]["ai_prompt_tokens_used"]["value"] == 10 assert span["measurements"]["ai_completion_tokens_used"]["value"] == 20 assert span["measurements"]["ai_total_tokens_used"]["value"] == 30 From e9cc47ac7c99f9aa0a2f22c250d65d66dc34d0cf Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Mon, 24 Feb 2025 16:00:59 +0100 Subject: [PATCH 181/244] fix import --- sentry_sdk/tracing.py | 1 + 1 file changed, 1 insertion(+) diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index d67a687944..6bb1eeab76 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -1,3 +1,4 @@ +from enum import Enum import json from datetime import datetime From 235616e2a2df29290c951a19c0c91aee269b7367 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Mon, 24 Feb 2025 16:02:34 +0100 Subject: [PATCH 182/244] one more merge conflict --- sentry_sdk/integrations/sanic.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sentry_sdk/integrations/sanic.py b/sentry_sdk/integrations/sanic.py index a2bce5676a..06e30ffe31 100644 --- a/sentry_sdk/integrations/sanic.py +++ b/sentry_sdk/integrations/sanic.py @@ -185,7 +185,7 @@ async def _context_enter(request): scope = request.ctx._sentry_scope_manager.__enter__() request.ctx._sentry_scope = scope - scope.set_transaction_name(request.path, TRANSACTION_SOURCE_URL) + scope.set_transaction_name(request.path, TransactionSource.URL) scope.clear_breadcrumbs() scope.add_event_processor(_make_request_processor(weak_request)) From e73ab4be9efd3ad1589d11d2501ef8849e07bec7 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Mon, 24 Feb 2025 16:11:34 +0100 Subject: [PATCH 183/244] Merge master into potel-base (#4093) Co-authored-by: Andrew Liu <159852527+aliu39@users.noreply.github.com> Co-authored-by: Marcelo Galigniana --- .github/workflows/test-integrations-dbs.yml | 6 ++- CHANGELOG.md | 8 ++-- .../templates/test_group.jinja | 3 +- sentry_sdk/integrations/aiohttp.py | 4 +- sentry_sdk/integrations/arq.py | 6 +-- sentry_sdk/integrations/asgi.py | 17 +++---- sentry_sdk/integrations/aws_lambda.py | 6 +-- sentry_sdk/integrations/celery/__init__.py | 6 +-- sentry_sdk/integrations/chalice.py | 4 +- sentry_sdk/integrations/django/__init__.py | 4 +- sentry_sdk/integrations/fastapi.py | 4 +- sentry_sdk/integrations/gcp.py | 4 +- sentry_sdk/integrations/grpc/aio/server.py | 4 +- sentry_sdk/integrations/grpc/server.py | 4 +- sentry_sdk/integrations/huey.py | 4 +- sentry_sdk/integrations/litestar.py | 4 +- 
sentry_sdk/integrations/ray.py | 6 +-- sentry_sdk/integrations/rq.py | 6 +-- sentry_sdk/integrations/sanic.py | 12 ++--- sentry_sdk/integrations/starlette.py | 9 ++-- sentry_sdk/integrations/starlite.py | 4 +- sentry_sdk/integrations/strawberry.py | 4 +- sentry_sdk/integrations/tornado.py | 9 ++-- sentry_sdk/integrations/wsgi.py | 6 +-- sentry_sdk/tracing.py | 45 +++++++++++-------- tests/integrations/asgi/test_asgi.py | 5 ++- tests/integrations/sanic/test_sanic.py | 8 ++-- tests/test_feature_flags.py | 19 ++++++++ 28 files changed, 124 insertions(+), 97 deletions(-) diff --git a/.github/workflows/test-integrations-dbs.yml b/.github/workflows/test-integrations-dbs.yml index 92778617f4..156e14d03d 100644 --- a/.github/workflows/test-integrations-dbs.yml +++ b/.github/workflows/test-integrations-dbs.yml @@ -55,7 +55,8 @@ jobs: with: python-version: ${{ matrix.python-version }} allow-prereleases: true - - uses: getsentry/action-clickhouse-in-ci@v1.1 + - name: "Setup ClickHouse Server" + uses: getsentry/action-clickhouse-in-ci@v1.5 - name: Setup Test Env run: | pip install "coverage[toml]" tox @@ -140,7 +141,8 @@ jobs: with: python-version: ${{ matrix.python-version }} allow-prereleases: true - - uses: getsentry/action-clickhouse-in-ci@v1.1 + - name: "Setup ClickHouse Server" + uses: getsentry/action-clickhouse-in-ci@v1.5 - name: Setup Test Env run: | pip install "coverage[toml]" tox diff --git a/CHANGELOG.md b/CHANGELOG.md index e6857c34ae..939a612bc0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2328,7 +2328,7 @@ By: @mgaligniana (#1773) import sentry_sdk from sentry_sdk.integrations.arq import ArqIntegration - from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT + from sentry_sdk.tracing import TransactionSource sentry_sdk.init( dsn="...", @@ -2348,7 +2348,7 @@ By: @mgaligniana (#1773) await ctx['session'].aclose() async def main(): - with sentry_sdk.start_transaction(name="testing_arq_tasks", source=TRANSACTION_SOURCE_COMPONENT): + with sentry_sdk.start_transaction(name="testing_arq_tasks", source=TransactionSource.COMPONENT): redis = await create_pool(RedisSettings()) for url in ('https://facebook.com', 'https://microsoft.com', 'https://github.com', "asdf" ): @@ -2422,7 +2422,7 @@ By: @mgaligniana (#1773) import sentry_sdk from sentry_sdk.integrations.huey import HueyIntegration - from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT, Transaction + from sentry_sdk.tracing import TransactionSource, Transaction def main(): @@ -2434,7 +2434,7 @@ By: @mgaligniana (#1773) traces_sample_rate=1.0, ) - with sentry_sdk.start_transaction(name="testing_huey_tasks", source=TRANSACTION_SOURCE_COMPONENT): + with sentry_sdk.start_transaction(name="testing_huey_tasks", source=TransactionSource.COMPONENT): r = add_numbers(1, 2) if __name__ == "__main__": diff --git a/scripts/split_tox_gh_actions/templates/test_group.jinja b/scripts/split_tox_gh_actions/templates/test_group.jinja index f94d642e32..adf530b5ad 100644 --- a/scripts/split_tox_gh_actions/templates/test_group.jinja +++ b/scripts/split_tox_gh_actions/templates/test_group.jinja @@ -47,7 +47,8 @@ python-version: {% raw %}${{ matrix.python-version }}{% endraw %} allow-prereleases: true {% if needs_clickhouse %} - - uses: getsentry/action-clickhouse-in-ci@v1.1 + - name: "Setup ClickHouse Server" + uses: getsentry/action-clickhouse-in-ci@v1.5 {% endif %} {% if needs_redis %} diff --git a/sentry_sdk/integrations/aiohttp.py b/sentry_sdk/integrations/aiohttp.py index 9ebef2f9c6..6efdd5d883 100644 --- a/sentry_sdk/integrations/aiohttp.py 
+++ b/sentry_sdk/integrations/aiohttp.py @@ -20,7 +20,7 @@ from sentry_sdk.tracing import ( BAGGAGE_HEADER_NAME, SOURCE_FOR_STYLE, - TRANSACTION_SOURCE_ROUTE, + TransactionSource, ) from sentry_sdk.tracing_utils import should_propagate_trace from sentry_sdk.utils import ( @@ -136,7 +136,7 @@ async def sentry_app_handle(self, request, *args, **kwargs): # If this transaction name makes it to the UI, AIOHTTP's # URL resolver did not find a route or died trying. name="generic AIOHTTP request", - source=TRANSACTION_SOURCE_ROUTE, + source=TransactionSource.ROUTE, origin=AioHttpIntegration.origin, attributes=_prepopulate_attributes(request), ) as span: diff --git a/sentry_sdk/integrations/arq.py b/sentry_sdk/integrations/arq.py index 2f3b063d88..f656b6ece3 100644 --- a/sentry_sdk/integrations/arq.py +++ b/sentry_sdk/integrations/arq.py @@ -5,7 +5,7 @@ from sentry_sdk.integrations import _check_minimum_version, DidNotEnable, Integration from sentry_sdk.integrations.logging import ignore_logger from sentry_sdk.scope import should_send_default_pii -from sentry_sdk.tracing import TRANSACTION_SOURCE_TASK +from sentry_sdk.tracing import TransactionSource from sentry_sdk.utils import ( capture_internal_exceptions, ensure_integration_enabled, @@ -103,14 +103,14 @@ async def _sentry_run_job(self, job_id, score): scope._name = "arq" scope.set_transaction_name( DEFAULT_TRANSACTION_NAME, - source=TRANSACTION_SOURCE_TASK, + source=TransactionSource.TASK, ) scope.clear_breadcrumbs() with sentry_sdk.start_span( op=OP.QUEUE_TASK_ARQ, name=DEFAULT_TRANSACTION_NAME, - source=TRANSACTION_SOURCE_TASK, + source=TransactionSource.TASK, origin=ArqIntegration.origin, ) as span: return_value = await old_run_job(self, job_id, score) diff --git a/sentry_sdk/integrations/asgi.py b/sentry_sdk/integrations/asgi.py index 2a8bbe5091..a5129debbf 100644 --- a/sentry_sdk/integrations/asgi.py +++ b/sentry_sdk/integrations/asgi.py @@ -25,10 +25,7 @@ from sentry_sdk.sessions import track_session from sentry_sdk.tracing import ( SOURCE_FOR_STYLE, - TRANSACTION_SOURCE_ROUTE, - TRANSACTION_SOURCE_URL, - TRANSACTION_SOURCE_COMPONENT, - TRANSACTION_SOURCE_CUSTOM, + TransactionSource, ) from sentry_sdk.utils import ( ContextVar, @@ -265,9 +262,9 @@ def event_processor(self, event, hint, asgi_scope): and "source" in event["transaction_info"] and event["transaction_info"]["source"] in [ - TRANSACTION_SOURCE_COMPONENT, - TRANSACTION_SOURCE_ROUTE, - TRANSACTION_SOURCE_CUSTOM, + TransactionSource.COMPONENT, + TransactionSource.ROUTE, + TransactionSource.CUSTOM, ] ) if not already_set: @@ -306,7 +303,7 @@ def _get_transaction_name_and_source(self, transaction_style, asgi_scope): name = transaction_from_function(endpoint) or "" else: name = _get_url(asgi_scope, "http" if ty == "http" else "ws", host=None) - source = TRANSACTION_SOURCE_URL + source = TransactionSource.URL elif transaction_style == "url": # FastAPI includes the route object in the scope to let Sentry extract the @@ -318,11 +315,11 @@ def _get_transaction_name_and_source(self, transaction_style, asgi_scope): name = path else: name = _get_url(asgi_scope, "http" if ty == "http" else "ws", host=None) - source = TRANSACTION_SOURCE_URL + source = TransactionSource.URL if name is None: name = _DEFAULT_TRANSACTION_NAME - source = TRANSACTION_SOURCE_ROUTE + source = TransactionSource.ROUTE return name, source return name, source diff --git a/sentry_sdk/integrations/aws_lambda.py b/sentry_sdk/integrations/aws_lambda.py index 82dedb3191..66d14b22a3 100644 --- 
a/sentry_sdk/integrations/aws_lambda.py +++ b/sentry_sdk/integrations/aws_lambda.py @@ -10,7 +10,7 @@ import sentry_sdk from sentry_sdk.consts import OP from sentry_sdk.scope import should_send_default_pii -from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT +from sentry_sdk.tracing import TransactionSource from sentry_sdk.utils import ( AnnotatedValue, capture_internal_exceptions, @@ -125,7 +125,7 @@ def sentry_handler(aws_event, aws_context, *args, **kwargs): with sentry_sdk.isolation_scope() as scope: scope.set_transaction_name( - aws_context.function_name, source=TRANSACTION_SOURCE_COMPONENT + aws_context.function_name, source=TransactionSource.COMPONENT ) timeout_thread = None with capture_internal_exceptions(): @@ -170,7 +170,7 @@ def sentry_handler(aws_event, aws_context, *args, **kwargs): with sentry_sdk.start_span( op=OP.FUNCTION_AWS, name=aws_context.function_name, - source=TRANSACTION_SOURCE_COMPONENT, + source=TransactionSource.COMPONENT, origin=AwsLambdaIntegration.origin, attributes=_prepopulate_attributes(request_data, aws_context), ): diff --git a/sentry_sdk/integrations/celery/__init__.py b/sentry_sdk/integrations/celery/__init__.py index 6b74af1cb7..238704fa68 100644 --- a/sentry_sdk/integrations/celery/__init__.py +++ b/sentry_sdk/integrations/celery/__init__.py @@ -13,7 +13,7 @@ ) from sentry_sdk.integrations.celery.utils import _now_seconds_since_epoch from sentry_sdk.integrations.logging import ignore_logger -from sentry_sdk.tracing import BAGGAGE_HEADER_NAME, TRANSACTION_SOURCE_TASK +from sentry_sdk.tracing import BAGGAGE_HEADER_NAME, TransactionSource from sentry_sdk.tracing_utils import Baggage from sentry_sdk.utils import ( capture_internal_exceptions, @@ -306,7 +306,7 @@ def _inner(*args, **kwargs): with isolation_scope() as scope: scope._name = "celery" scope.clear_breadcrumbs() - scope.set_transaction_name(task.name, source=TRANSACTION_SOURCE_TASK) + scope.set_transaction_name(task.name, source=TransactionSource.TASK) scope.add_event_processor(_make_event_processor(task, *args, **kwargs)) # Celery task objects are not a thing to be trusted. 
Even @@ -317,7 +317,7 @@ def _inner(*args, **kwargs): with sentry_sdk.start_span( op=OP.QUEUE_TASK_CELERY, name=task.name, - source=TRANSACTION_SOURCE_TASK, + source=TransactionSource.TASK, origin=CeleryIntegration.origin, # for some reason, args[1] is a list if non-empty but a # tuple if empty diff --git a/sentry_sdk/integrations/chalice.py b/sentry_sdk/integrations/chalice.py index 0754d1f13b..947e41ebf7 100644 --- a/sentry_sdk/integrations/chalice.py +++ b/sentry_sdk/integrations/chalice.py @@ -4,7 +4,7 @@ import sentry_sdk from sentry_sdk.integrations import Integration, DidNotEnable from sentry_sdk.integrations.aws_lambda import _make_request_event_processor -from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT +from sentry_sdk.tracing import TransactionSource from sentry_sdk.utils import ( capture_internal_exceptions, event_from_exception, @@ -67,7 +67,7 @@ def wrapped_view_function(**function_args): configured_time = app.lambda_context.get_remaining_time_in_millis() scope.set_transaction_name( app.lambda_context.function_name, - source=TRANSACTION_SOURCE_COMPONENT, + source=TransactionSource.COMPONENT, ) scope.add_event_processor( diff --git a/sentry_sdk/integrations/django/__init__.py b/sentry_sdk/integrations/django/__init__.py index c7f5a874cf..e8aa673787 100644 --- a/sentry_sdk/integrations/django/__init__.py +++ b/sentry_sdk/integrations/django/__init__.py @@ -8,7 +8,7 @@ from sentry_sdk.consts import OP, SPANDATA from sentry_sdk.scope import add_global_event_processor, should_send_default_pii from sentry_sdk.serializer import add_global_repr_processor -from sentry_sdk.tracing import SOURCE_FOR_STYLE, TRANSACTION_SOURCE_URL +from sentry_sdk.tracing import SOURCE_FOR_STYLE, TransactionSource from sentry_sdk.tracing_utils import add_query_source, record_sql_queries from sentry_sdk.utils import ( AnnotatedValue, @@ -398,7 +398,7 @@ def _set_transaction_name_and_source(scope, transaction_style, request): if transaction_name is None: transaction_name = request.path_info - source = TRANSACTION_SOURCE_URL + source = TransactionSource.URL else: source = SOURCE_FOR_STYLE[transaction_style] diff --git a/sentry_sdk/integrations/fastapi.py b/sentry_sdk/integrations/fastapi.py index 8877925a36..76c6adee0f 100644 --- a/sentry_sdk/integrations/fastapi.py +++ b/sentry_sdk/integrations/fastapi.py @@ -5,7 +5,7 @@ import sentry_sdk from sentry_sdk.integrations import DidNotEnable from sentry_sdk.scope import should_send_default_pii -from sentry_sdk.tracing import SOURCE_FOR_STYLE, TRANSACTION_SOURCE_ROUTE +from sentry_sdk.tracing import SOURCE_FOR_STYLE, TransactionSource from sentry_sdk.utils import ( transaction_from_function, logger, @@ -61,7 +61,7 @@ def _set_transaction_name_and_source(scope, transaction_style, request): if not name: name = _DEFAULT_TRANSACTION_NAME - source = TRANSACTION_SOURCE_ROUTE + source = TransactionSource.ROUTE else: source = SOURCE_FOR_STYLE[transaction_style] diff --git a/sentry_sdk/integrations/gcp.py b/sentry_sdk/integrations/gcp.py index ec626ed699..97b72ff1ce 100644 --- a/sentry_sdk/integrations/gcp.py +++ b/sentry_sdk/integrations/gcp.py @@ -12,7 +12,7 @@ _request_headers_to_span_attributes, ) from sentry_sdk.scope import should_send_default_pii -from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT +from sentry_sdk.tracing import TransactionSource from sentry_sdk.utils import ( AnnotatedValue, capture_internal_exceptions, @@ -90,7 +90,7 @@ def sentry_func(functionhandler, gcp_event, *args, **kwargs): with sentry_sdk.start_span( 
op=OP.FUNCTION_GCP, name=environ.get("FUNCTION_NAME", ""), - source=TRANSACTION_SOURCE_COMPONENT, + source=TransactionSource.COMPONENT, origin=GcpIntegration.origin, attributes=_prepopulate_attributes(gcp_event), ): diff --git a/sentry_sdk/integrations/grpc/aio/server.py b/sentry_sdk/integrations/grpc/aio/server.py index 4d54b0605c..91c2e9d74f 100644 --- a/sentry_sdk/integrations/grpc/aio/server.py +++ b/sentry_sdk/integrations/grpc/aio/server.py @@ -2,7 +2,7 @@ from sentry_sdk.consts import OP from sentry_sdk.integrations import DidNotEnable from sentry_sdk.integrations.grpc.consts import SPAN_ORIGIN -from sentry_sdk.tracing import TRANSACTION_SOURCE_CUSTOM +from sentry_sdk.tracing import TransactionSource from sentry_sdk.utils import event_from_exception from typing import TYPE_CHECKING @@ -48,7 +48,7 @@ async def wrapped(request, context): with sentry_sdk.start_span( op=OP.GRPC_SERVER, name=name, - source=TRANSACTION_SOURCE_CUSTOM, + source=TransactionSource.CUSTOM, origin=SPAN_ORIGIN, ): try: diff --git a/sentry_sdk/integrations/grpc/server.py b/sentry_sdk/integrations/grpc/server.py index d12b43b92b..582ef6e24a 100644 --- a/sentry_sdk/integrations/grpc/server.py +++ b/sentry_sdk/integrations/grpc/server.py @@ -2,7 +2,7 @@ from sentry_sdk.consts import OP from sentry_sdk.integrations import DidNotEnable from sentry_sdk.integrations.grpc.consts import SPAN_ORIGIN -from sentry_sdk.tracing import TRANSACTION_SOURCE_CUSTOM +from sentry_sdk.tracing import TransactionSource from typing import TYPE_CHECKING @@ -42,7 +42,7 @@ def behavior(request, context): with sentry_sdk.start_span( op=OP.GRPC_SERVER, name=name, - source=TRANSACTION_SOURCE_CUSTOM, + source=TransactionSource.CUSTOM, origin=SPAN_ORIGIN, ): try: diff --git a/sentry_sdk/integrations/huey.py b/sentry_sdk/integrations/huey.py index 4dcff8513f..8e9d45a526 100644 --- a/sentry_sdk/integrations/huey.py +++ b/sentry_sdk/integrations/huey.py @@ -9,7 +9,7 @@ from sentry_sdk.tracing import ( BAGGAGE_HEADER_NAME, SENTRY_TRACE_HEADER_NAME, - TRANSACTION_SOURCE_TASK, + TransactionSource, ) from sentry_sdk.utils import ( capture_internal_exceptions, @@ -165,7 +165,7 @@ def _sentry_execute(self, task, timestamp=None): with sentry_sdk.start_span( name=task.name, op=OP.QUEUE_TASK_HUEY, - source=TRANSACTION_SOURCE_TASK, + source=TransactionSource.TASK, origin=HueyIntegration.origin, ): return old_execute(self, task, timestamp) diff --git a/sentry_sdk/integrations/litestar.py b/sentry_sdk/integrations/litestar.py index 4feb9f775e..484fea46c8 100644 --- a/sentry_sdk/integrations/litestar.py +++ b/sentry_sdk/integrations/litestar.py @@ -9,7 +9,7 @@ from sentry_sdk.integrations.asgi import SentryAsgiMiddleware from sentry_sdk.integrations.logging import ignore_logger from sentry_sdk.scope import should_send_default_pii -from sentry_sdk.tracing import SOURCE_FOR_STYLE, TRANSACTION_SOURCE_ROUTE +from sentry_sdk.tracing import TransactionSource, SOURCE_FOR_STYLE from sentry_sdk.utils import ( ensure_integration_enabled, event_from_exception, @@ -252,7 +252,7 @@ def event_processor(event, _): if not tx_name: tx_name = _DEFAULT_TRANSACTION_NAME - tx_info = {"source": TRANSACTION_SOURCE_ROUTE} + tx_info = {"source": TransactionSource.ROUTE} event.update( { diff --git a/sentry_sdk/integrations/ray.py b/sentry_sdk/integrations/ray.py index e033d93335..a0ec9713c1 100644 --- a/sentry_sdk/integrations/ray.py +++ b/sentry_sdk/integrations/ray.py @@ -4,7 +4,7 @@ import sentry_sdk from sentry_sdk.consts import OP, SPANSTATUS from sentry_sdk.integrations import 
_check_minimum_version, DidNotEnable, Integration -from sentry_sdk.tracing import TRANSACTION_SOURCE_TASK +from sentry_sdk.tracing import TransactionSource from sentry_sdk.utils import ( event_from_exception, logger, @@ -63,14 +63,14 @@ def _f(*f_args, _tracing=None, **f_kwargs): root_span_name = qualname_from_function(f) or DEFAULT_TRANSACTION_NAME sentry_sdk.get_current_scope().set_transaction_name( root_span_name, - source=TRANSACTION_SOURCE_TASK, + source=TransactionSource.TASK, ) with sentry_sdk.continue_trace(_tracing or {}): with sentry_sdk.start_span( op=OP.QUEUE_TASK_RAY, name=root_span_name, origin=RayIntegration.origin, - source=TRANSACTION_SOURCE_TASK, + source=TransactionSource.TASK, ) as root_span: try: result = f(*f_args, **f_kwargs) diff --git a/sentry_sdk/integrations/rq.py b/sentry_sdk/integrations/rq.py index d87a4fbded..33910ed476 100644 --- a/sentry_sdk/integrations/rq.py +++ b/sentry_sdk/integrations/rq.py @@ -4,7 +4,7 @@ from sentry_sdk.consts import OP from sentry_sdk.integrations import _check_minimum_version, DidNotEnable, Integration from sentry_sdk.integrations.logging import ignore_logger -from sentry_sdk.tracing import TRANSACTION_SOURCE_TASK +from sentry_sdk.tracing import TransactionSource from sentry_sdk.utils import ( capture_internal_exceptions, ensure_integration_enabled, @@ -66,7 +66,7 @@ def sentry_patched_perform_job(self, job, queue, *args, **kwargs): transaction_name = DEFAULT_TRANSACTION_NAME scope.set_transaction_name( - transaction_name, source=TRANSACTION_SOURCE_TASK + transaction_name, source=TransactionSource.TASK ) scope.clear_breadcrumbs() scope.add_event_processor(_make_event_processor(weakref.ref(job))) @@ -77,7 +77,7 @@ def sentry_patched_perform_job(self, job, queue, *args, **kwargs): with sentry_sdk.start_span( op=OP.QUEUE_TASK_RQ, name=transaction_name, - source=TRANSACTION_SOURCE_TASK, + source=TransactionSource.TASK, origin=RqIntegration.origin, attributes=_prepopulate_attributes(job, queue), ): diff --git a/sentry_sdk/integrations/sanic.py b/sentry_sdk/integrations/sanic.py index 6143313cc4..06e30ffe31 100644 --- a/sentry_sdk/integrations/sanic.py +++ b/sentry_sdk/integrations/sanic.py @@ -8,7 +8,7 @@ from sentry_sdk.integrations import _check_minimum_version, Integration, DidNotEnable from sentry_sdk.integrations._wsgi_common import RequestExtractor, _filter_headers from sentry_sdk.integrations.logging import ignore_logger -from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT, TRANSACTION_SOURCE_URL +from sentry_sdk.tracing import TransactionSource from sentry_sdk.utils import ( capture_internal_exceptions, ensure_integration_enabled, @@ -185,7 +185,7 @@ async def _context_enter(request): scope = request.ctx._sentry_scope_manager.__enter__() request.ctx._sentry_scope = scope - scope.set_transaction_name(request.path, TRANSACTION_SOURCE_URL) + scope.set_transaction_name(request.path, TransactionSource.URL) scope.clear_breadcrumbs() scope.add_event_processor(_make_request_processor(weak_request)) @@ -198,7 +198,7 @@ async def _context_enter(request): op=OP.HTTP_SERVER, # Unless the request results in a 404 error, the name and source will get overwritten in _set_transaction name=request.path, - source=TRANSACTION_SOURCE_URL, + source=TransactionSource.URL, origin=SanicIntegration.origin, ).__enter__() @@ -239,7 +239,7 @@ async def _set_transaction(request, route, **_): with capture_internal_exceptions(): scope = sentry_sdk.get_current_scope() route_name = route.name.replace(request.app.name, "").strip(".") - 
scope.set_transaction_name(route_name, source=TRANSACTION_SOURCE_COMPONENT) + scope.set_transaction_name(route_name, source=TransactionSource.COMPONENT) def _sentry_error_handler_lookup(self, exception, *args, **kwargs): @@ -314,11 +314,11 @@ def _legacy_router_get(self, *args): sanic_route = sanic_route[len(sanic_app_name) + 1 :] scope.set_transaction_name( - sanic_route, source=TRANSACTION_SOURCE_COMPONENT + sanic_route, source=TransactionSource.COMPONENT ) else: scope.set_transaction_name( - rv[0].__name__, source=TRANSACTION_SOURCE_COMPONENT + rv[0].__name__, source=TransactionSource.COMPONENT ) return rv diff --git a/sentry_sdk/integrations/starlette.py b/sentry_sdk/integrations/starlette.py index c8a415a64d..cb2da74a04 100644 --- a/sentry_sdk/integrations/starlette.py +++ b/sentry_sdk/integrations/starlette.py @@ -21,8 +21,7 @@ from sentry_sdk.scope import should_send_default_pii from sentry_sdk.tracing import ( SOURCE_FOR_STYLE, - TRANSACTION_SOURCE_COMPONENT, - TRANSACTION_SOURCE_ROUTE, + TransactionSource, ) from sentry_sdk.utils import ( AnnotatedValue, @@ -717,7 +716,7 @@ def _set_transaction_name_and_source(scope, transaction_style, request): if name is None: name = _DEFAULT_TRANSACTION_NAME - source = TRANSACTION_SOURCE_ROUTE + source = TransactionSource.ROUTE scope.set_transaction_name(name, source=source) logger.debug( @@ -732,9 +731,9 @@ def _get_transaction_from_middleware(app, asgi_scope, integration): if integration.transaction_style == "endpoint": name = transaction_from_function(app.__class__) - source = TRANSACTION_SOURCE_COMPONENT + source = TransactionSource.COMPONENT elif integration.transaction_style == "url": name = _transaction_name_from_router(asgi_scope) - source = TRANSACTION_SOURCE_ROUTE + source = TransactionSource.ROUTE return name, source diff --git a/sentry_sdk/integrations/starlite.py b/sentry_sdk/integrations/starlite.py index 66f5025c26..dae105447b 100644 --- a/sentry_sdk/integrations/starlite.py +++ b/sentry_sdk/integrations/starlite.py @@ -3,7 +3,7 @@ from sentry_sdk.integrations import DidNotEnable, Integration from sentry_sdk.integrations.asgi import SentryAsgiMiddleware from sentry_sdk.scope import should_send_default_pii -from sentry_sdk.tracing import SOURCE_FOR_STYLE, TRANSACTION_SOURCE_ROUTE +from sentry_sdk.tracing import SOURCE_FOR_STYLE, TransactionSource from sentry_sdk.utils import ( ensure_integration_enabled, event_from_exception, @@ -238,7 +238,7 @@ def event_processor(event, _): if not tx_name: tx_name = _DEFAULT_TRANSACTION_NAME - tx_info = {"source": TRANSACTION_SOURCE_ROUTE} + tx_info = {"source": TransactionSource.ROUTE} event.update( { diff --git a/sentry_sdk/integrations/strawberry.py b/sentry_sdk/integrations/strawberry.py index b2071ed6b5..608dfcbb8c 100644 --- a/sentry_sdk/integrations/strawberry.py +++ b/sentry_sdk/integrations/strawberry.py @@ -7,7 +7,7 @@ from sentry_sdk.integrations import _check_minimum_version, Integration, DidNotEnable from sentry_sdk.integrations.logging import ignore_logger from sentry_sdk.scope import should_send_default_pii -from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT +from sentry_sdk.tracing import TransactionSource from sentry_sdk.utils import ( capture_internal_exceptions, ensure_integration_enabled, @@ -196,7 +196,7 @@ def on_operation(self): sentry_sdk.get_current_scope().set_transaction_name( self._operation_name, - source=TRANSACTION_SOURCE_COMPONENT, + source=TransactionSource.COMPONENT, ) root_span = graphql_span.root_span diff --git a/sentry_sdk/integrations/tornado.py 
b/sentry_sdk/integrations/tornado.py index 5ecd71bced..70fb21ee14 100644 --- a/sentry_sdk/integrations/tornado.py +++ b/sentry_sdk/integrations/tornado.py @@ -5,10 +5,7 @@ import sentry_sdk from sentry_sdk.consts import OP from sentry_sdk.scope import should_send_default_pii -from sentry_sdk.tracing import ( - TRANSACTION_SOURCE_COMPONENT, - TRANSACTION_SOURCE_ROUTE, -) +from sentry_sdk.tracing import TransactionSource from sentry_sdk.utils import ( HAS_REAL_CONTEXTVARS, CONTEXTVARS_ERROR_MESSAGE, @@ -131,7 +128,7 @@ def _handle_request_impl(self): # sentry_urldispatcher_resolve is responsible for # setting a transaction name later. name="generic Tornado request", - source=TRANSACTION_SOURCE_ROUTE, + source=TransactionSource.ROUTE, origin=TornadoIntegration.origin, attributes=_prepopulate_attributes(self.request), ): @@ -166,7 +163,7 @@ def tornado_processor(event, hint): with capture_internal_exceptions(): method = getattr(handler, handler.request.method.lower()) event["transaction"] = transaction_from_function(method) or "" - event["transaction_info"] = {"source": TRANSACTION_SOURCE_COMPONENT} + event["transaction_info"] = {"source": TransactionSource.COMPONENT} with capture_internal_exceptions(): extractor = TornadoRequestExtractor(request) diff --git a/sentry_sdk/integrations/wsgi.py b/sentry_sdk/integrations/wsgi.py index 74051df0db..85983b18c4 100644 --- a/sentry_sdk/integrations/wsgi.py +++ b/sentry_sdk/integrations/wsgi.py @@ -11,7 +11,7 @@ _request_headers_to_span_attributes, ) from sentry_sdk.sessions import track_session -from sentry_sdk.tracing import Span, TRANSACTION_SOURCE_ROUTE +from sentry_sdk.tracing import Span, TransactionSource from sentry_sdk.utils import ( ContextVar, capture_internal_exceptions, @@ -108,7 +108,7 @@ def __call__(self, environ, start_response): try: with sentry_sdk.isolation_scope() as scope: scope.set_transaction_name( - DEFAULT_TRANSACTION_NAME, source=TRANSACTION_SOURCE_ROUTE + DEFAULT_TRANSACTION_NAME, source=TransactionSource.ROUTE ) with track_session(scope, session_mode="request"): @@ -127,7 +127,7 @@ def __call__(self, environ, start_response): with sentry_sdk.start_span( op=OP.HTTP_SERVER, name=DEFAULT_TRANSACTION_NAME, - source=TRANSACTION_SOURCE_ROUTE, + source=TransactionSource.ROUTE, origin=self.span_origin, attributes=_prepopulate_attributes( environ, self.use_x_forwarded_for diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 0272449198..6bb1eeab76 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -1,3 +1,4 @@ +from enum import Enum import json from datetime import datetime @@ -23,6 +24,7 @@ from typing import TYPE_CHECKING, cast + if TYPE_CHECKING: from collections.abc import Callable from typing import Any @@ -127,30 +129,37 @@ class TransactionKwargs(SpanKwargs, total=False): BAGGAGE_HEADER_NAME = "baggage" SENTRY_TRACE_HEADER_NAME = "sentry-trace" + # Transaction source # see https://develop.sentry.dev/sdk/event-payloads/transaction/#transaction-annotations -TRANSACTION_SOURCE_CUSTOM = "custom" -TRANSACTION_SOURCE_URL = "url" -TRANSACTION_SOURCE_ROUTE = "route" -TRANSACTION_SOURCE_VIEW = "view" -TRANSACTION_SOURCE_COMPONENT = "component" -TRANSACTION_SOURCE_TASK = "task" +class TransactionSource(str, Enum): + COMPONENT = "component" + CUSTOM = "custom" + ROUTE = "route" + TASK = "task" + URL = "url" + VIEW = "view" + + def __str__(self): + # type: () -> str + return self.value + # These are typically high cardinality and the server hates them LOW_QUALITY_TRANSACTION_SOURCES = [ - TRANSACTION_SOURCE_URL, 
+ TransactionSource.URL, ] SOURCE_FOR_STYLE = { - "endpoint": TRANSACTION_SOURCE_COMPONENT, - "function_name": TRANSACTION_SOURCE_COMPONENT, - "handler_name": TRANSACTION_SOURCE_COMPONENT, - "method_and_path_pattern": TRANSACTION_SOURCE_ROUTE, - "path": TRANSACTION_SOURCE_URL, - "route_name": TRANSACTION_SOURCE_COMPONENT, - "route_pattern": TRANSACTION_SOURCE_ROUTE, - "uri_template": TRANSACTION_SOURCE_ROUTE, - "url": TRANSACTION_SOURCE_ROUTE, + "endpoint": TransactionSource.COMPONENT, + "function_name": TransactionSource.COMPONENT, + "handler_name": TransactionSource.COMPONENT, + "method_and_path_pattern": TransactionSource.ROUTE, + "path": TransactionSource.URL, + "route_name": TransactionSource.COMPONENT, + "route_pattern": TransactionSource.ROUTE, + "uri_template": TransactionSource.ROUTE, + "url": TransactionSource.ROUTE, } DEFAULT_SPAN_ORIGIN = "manual" @@ -303,7 +312,7 @@ def __init__( start_timestamp=None, # type: Optional[Union[datetime, float]] origin=None, # type: Optional[str] name=None, # type: Optional[str] - source=TRANSACTION_SOURCE_CUSTOM, # type: str + source=TransactionSource.CUSTOM, # type: str attributes=None, # type: Optional[dict[str, Any]] only_if_parent=False, # type: bool parent_span=None, # type: Optional[Span] @@ -566,7 +575,7 @@ def source(self): from sentry_sdk.integrations.opentelemetry.consts import SentrySpanAttribute return ( - self.get_attribute(SentrySpanAttribute.SOURCE) or TRANSACTION_SOURCE_CUSTOM + self.get_attribute(SentrySpanAttribute.SOURCE) or TransactionSource.CUSTOM ) @source.setter diff --git a/tests/integrations/asgi/test_asgi.py b/tests/integrations/asgi/test_asgi.py index 96d4e709f1..d0ddef8611 100644 --- a/tests/integrations/asgi/test_asgi.py +++ b/tests/integrations/asgi/test_asgi.py @@ -3,6 +3,7 @@ import pytest import sentry_sdk from sentry_sdk import capture_message +from sentry_sdk.tracing import TransactionSource from sentry_sdk.integrations._asgi_common import _get_ip, _get_headers from sentry_sdk.integrations.asgi import SentryAsgiMiddleware, _looks_like_asgi3 @@ -129,7 +130,9 @@ async def app(scope, receive, send): @pytest.fixture def asgi3_custom_transaction_app(): async def app(scope, receive, send): - sentry_sdk.get_current_scope().set_transaction_name("foobar", source="custom") + sentry_sdk.get_current_scope().set_transaction_name( + "foobar", source=TransactionSource.CUSTOM + ) await send( { "type": "http.response.start", diff --git a/tests/integrations/sanic/test_sanic.py b/tests/integrations/sanic/test_sanic.py index a3fc5a7652..05b23cb215 100644 --- a/tests/integrations/sanic/test_sanic.py +++ b/tests/integrations/sanic/test_sanic.py @@ -10,7 +10,7 @@ import sentry_sdk from sentry_sdk import capture_message from sentry_sdk.integrations.sanic import SanicIntegration -from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT, TRANSACTION_SOURCE_URL +from sentry_sdk.tracing import TransactionSource from sanic import Sanic, request, response, __version__ as SANIC_VERSION_RAW from sanic.response import HTTPResponse @@ -372,7 +372,7 @@ def __init__( url="/message", expected_status=200, expected_transaction_name="hi", - expected_source=TRANSACTION_SOURCE_COMPONENT, + expected_source=TransactionSource.COMPONENT, ), TransactionTestConfig( # Transaction still recorded when we have an internal server error @@ -380,7 +380,7 @@ def __init__( url="/500", expected_status=500, expected_transaction_name="fivehundred", - expected_source=TRANSACTION_SOURCE_COMPONENT, + expected_source=TransactionSource.COMPONENT, ), TransactionTestConfig( 
# By default, no transaction when we have a 404 error @@ -396,7 +396,7 @@ def __init__( url="/404", expected_status=404, expected_transaction_name="/404", - expected_source=TRANSACTION_SOURCE_URL, + expected_source=TransactionSource.URL, ), TransactionTestConfig( # Transaction can be suppressed for other HTTP statuses, too, by passing config to the integration diff --git a/tests/test_feature_flags.py b/tests/test_feature_flags.py index 4469b5c2ca..0df30bd0ea 100644 --- a/tests/test_feature_flags.py +++ b/tests/test_feature_flags.py @@ -170,6 +170,25 @@ def test_flag_tracking(): {"flag": "f", "result": False}, ] + # Test updates + buffer.set("e", True) + buffer.set("e", False) + buffer.set("e", True) + flags = buffer.get() + assert flags == [ + {"flag": "d", "result": False}, + {"flag": "f", "result": False}, + {"flag": "e", "result": True}, + ] + + buffer.set("d", True) + flags = buffer.get() + assert flags == [ + {"flag": "f", "result": False}, + {"flag": "e", "result": True}, + {"flag": "d", "result": True}, + ] + def test_flag_buffer_concurrent_access(): buffer = FlagBuffer(capacity=100) From 9ddfa938bca5f754c6ede236484379d63b431706 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Tue, 25 Feb 2025 14:03:17 +0100 Subject: [PATCH 184/244] Tread `SystemExit(0)` not as a span status of 'internal_error' (#4094) Also make sure, that the span status is not set as a tag on the span. SDKs should not set tags at all by default (only users are allowed to set tags) Fixes #4065 --- .../integrations/opentelemetry/span_processor.py | 3 --- sentry_sdk/tracing.py | 3 ++- sentry_sdk/utils.py | 1 - tests/tracing/test_integration_tests.py | 11 +++++------ 4 files changed, 7 insertions(+), 11 deletions(-) diff --git a/sentry_sdk/integrations/opentelemetry/span_processor.py b/sentry_sdk/integrations/opentelemetry/span_processor.py index a3cf545daf..bf3ff62d1b 100644 --- a/sentry_sdk/integrations/opentelemetry/span_processor.py +++ b/sentry_sdk/integrations/opentelemetry/span_processor.py @@ -261,9 +261,6 @@ def _span_to_json(self, span): } ) - if status: - span_json.setdefault("tags", {})["status"] = status - if parent_span_id: span_json["parent_span_id"] = parent_span_id diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 6bb1eeab76..744003eaba 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -20,6 +20,7 @@ _serialize_span_attribute, get_current_thread_meta, logger, + should_be_treated_as_error, ) from typing import TYPE_CHECKING, cast @@ -424,7 +425,7 @@ def __enter__(self): def __exit__(self, ty, value, tb): # type: (Optional[Any], Optional[Any], Optional[Any]) -> None - if value is not None: + if value is not None and should_be_treated_as_error(ty, value): self.set_status(SPANSTATUS.INTERNAL_ERROR) else: status_unset = ( diff --git a/sentry_sdk/utils.py b/sentry_sdk/utils.py index ce1437222c..d7aa316e40 100644 --- a/sentry_sdk/utils.py +++ b/sentry_sdk/utils.py @@ -1879,7 +1879,6 @@ def datetime_from_isoformat(value): return result.astimezone(timezone.utc) -# TODO-neel-potel use in span status def should_be_treated_as_error(ty, value): # type: (Any, Any) -> bool if ty == SystemExit and hasattr(value, "code") and value.code in (0, None): diff --git a/tests/tracing/test_integration_tests.py b/tests/tracing/test_integration_tests.py index 0a96e859e4..22deabb692 100644 --- a/tests/tracing/test_integration_tests.py +++ b/tests/tracing/test_integration_tests.py @@ -37,11 +37,10 @@ def test_basic(sentry_init, capture_events, sample_rate): span1, span2 = event["spans"] parent_span 
= event - assert span1["tags"]["status"] == "internal_error" assert span1["status"] == "internal_error" assert span1["op"] == "foo" assert span1["description"] == "foodesc" - assert span2["tags"]["status"] == "ok" + assert span2["status"] == "ok" assert span2["op"] == "bar" assert span2["description"] == "bardesc" assert parent_span["transaction"] == "hi" @@ -253,8 +252,8 @@ def test_non_error_exceptions( sentry_init(traces_sample_rate=1.0) events = capture_events() - with start_span(name="hi") as span: - span.set_status(SPANSTATUS.OK) + with start_span(name="hi") as root_span: + root_span.set_status(SPANSTATUS.OK) with pytest.raises(exception_cls): with start_span(op="foo", name="foodesc"): raise exception_cls(exception_value) @@ -264,7 +263,7 @@ def test_non_error_exceptions( span = event["spans"][0] assert "status" not in span.get("tags", {}) - assert "status" not in event["tags"] + assert "status" not in event.get("tags", {}) assert event["contexts"]["trace"]["status"] == "ok" @@ -289,5 +288,5 @@ def test_good_sysexit_doesnt_fail_transaction( span = event["spans"][0] assert "status" not in span.get("tags", {}) - assert "status" not in event["tags"] + assert "status" not in event.get("tags", {}) assert event["contexts"]["trace"]["status"] == "ok" From 9313c6915f87d46e51c0182bcedfd63f2f92bbc8 Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Wed, 26 Feb 2025 13:55:03 +0100 Subject: [PATCH 185/244] Set HTTP client breadcrumb level based on status code (#4090) On potel-base, we got rid of the `maybe_create_breadcrumbs` function in favor of creating breadcrumbs manually, so the new breadcrumb level logic needs to go directly in the affected integrations. Closes https://github.com/getsentry/sentry-python/issues/4066 --- sentry_sdk/integrations/aiohttp.py | 5 ++++- sentry_sdk/integrations/httpx.py | 2 ++ sentry_sdk/integrations/stdlib.py | 9 ++++++--- sentry_sdk/utils.py | 11 +++++++++++ tests/integrations/aiohttp/test_aiohttp.py | 9 +++------ tests/integrations/httpx/test_httpx.py | 11 +++-------- tests/integrations/requests/test_requests.py | 11 +++-------- tests/integrations/stdlib/test_httplib.py | 11 +++-------- 8 files changed, 35 insertions(+), 34 deletions(-) diff --git a/sentry_sdk/integrations/aiohttp.py b/sentry_sdk/integrations/aiohttp.py index 6efdd5d883..62af0406cb 100644 --- a/sentry_sdk/integrations/aiohttp.py +++ b/sentry_sdk/integrations/aiohttp.py @@ -27,6 +27,7 @@ capture_internal_exceptions, ensure_integration_enabled, event_from_exception, + http_client_status_to_breadcrumb_level, logger, parse_url, parse_version, @@ -277,13 +278,15 @@ async def on_request_end(session, trace_config_ctx, params): return span_data = trace_config_ctx.span_data or {} - span_data[SPANDATA.HTTP_STATUS_CODE] = int(params.response.status) + status_code = int(params.response.status) + span_data[SPANDATA.HTTP_STATUS_CODE] = status_code span_data["reason"] = params.response.reason sentry_sdk.add_breadcrumb( type="http", category="httplib", data=span_data, + level=http_client_status_to_breadcrumb_level(status_code), ) span = trace_config_ctx.span diff --git a/sentry_sdk/integrations/httpx.py b/sentry_sdk/integrations/httpx.py index 61188f9ef3..4c64f232ef 100644 --- a/sentry_sdk/integrations/httpx.py +++ b/sentry_sdk/integrations/httpx.py @@ -7,6 +7,7 @@ SENSITIVE_DATA_SUBSTITUTE, capture_internal_exceptions, ensure_integration_enabled, + http_client_status_to_breadcrumb_level, logger, parse_url, ) @@ -101,6 +102,7 @@ def send(self, request, **kwargs): type="http", category="httplib", data=data, + 
level=http_client_status_to_breadcrumb_level(rv.status_code), ) return rv diff --git a/sentry_sdk/integrations/stdlib.py b/sentry_sdk/integrations/stdlib.py index 7b704593db..a6db07f48f 100644 --- a/sentry_sdk/integrations/stdlib.py +++ b/sentry_sdk/integrations/stdlib.py @@ -14,6 +14,7 @@ capture_internal_exceptions, ensure_integration_enabled, get_current_thread_meta, + http_client_status_to_breadcrumb_level, is_sentry_url, logger, safe_repr, @@ -144,14 +145,16 @@ def getresponse(self, *args, **kwargs): span_data[SPANDATA.HTTP_STATUS_CODE] = int(rv.status) span_data["reason"] = rv.reason + status_code = int(rv.status) + span.set_http_status(status_code) + span.set_data("reason", rv.reason) + sentry_sdk.add_breadcrumb( type="http", category="httplib", data=span_data, + level=http_client_status_to_breadcrumb_level(status_code), ) - - span.set_http_status(int(rv.status)) - span.set_data("reason", rv.reason) finally: span.__exit__(None, None, None) diff --git a/sentry_sdk/utils.py b/sentry_sdk/utils.py index d7aa316e40..22bb09c242 100644 --- a/sentry_sdk/utils.py +++ b/sentry_sdk/utils.py @@ -1886,3 +1886,14 @@ def should_be_treated_as_error(ty, value): return False return True + + +def http_client_status_to_breadcrumb_level(status_code): + # type: (Optional[int]) -> str + if status_code is not None: + if 500 <= status_code <= 599: + return "error" + elif 400 <= status_code <= 499: + return "warning" + + return "info" diff --git a/tests/integrations/aiohttp/test_aiohttp.py b/tests/integrations/aiohttp/test_aiohttp.py index b3a1f1b6a7..f5fa766eae 100644 --- a/tests/integrations/aiohttp/test_aiohttp.py +++ b/tests/integrations/aiohttp/test_aiohttp.py @@ -534,8 +534,8 @@ async def handler(request): @pytest.mark.parametrize( "status_code,level", [ - (200, None), - (301, None), + (200, "info"), + (301, "info"), (403, "warning"), (405, "warning"), (500, "error"), @@ -570,10 +570,7 @@ async def handler(request): crumb = event["breadcrumbs"]["values"][0] assert crumb["type"] == "http" - if level is None: - assert "level" not in crumb - else: - assert crumb["level"] == level + assert crumb["level"] == level assert crumb["category"] == "httplib" assert crumb["data"] == ApproxDict( { diff --git a/tests/integrations/httpx/test_httpx.py b/tests/integrations/httpx/test_httpx.py index f59c14e761..e5ebb19bde 100644 --- a/tests/integrations/httpx/test_httpx.py +++ b/tests/integrations/httpx/test_httpx.py @@ -64,8 +64,8 @@ def before_breadcrumb(crumb, hint): @pytest.mark.parametrize( "status_code,level", [ - (200, None), - (301, None), + (200, "info"), + (301, "info"), (403, "warning"), (405, "warning"), (500, "error"), @@ -98,12 +98,7 @@ def test_crumb_capture_client_error( crumb = event["breadcrumbs"]["values"][0] assert crumb["type"] == "http" assert crumb["category"] == "httplib" - - if level is None: - assert "level" not in crumb - else: - assert crumb["level"] == level - + assert crumb["level"] == level assert crumb["data"] == ApproxDict( { "url": url, diff --git a/tests/integrations/requests/test_requests.py b/tests/integrations/requests/test_requests.py index 8cfc0f932f..3862763a75 100644 --- a/tests/integrations/requests/test_requests.py +++ b/tests/integrations/requests/test_requests.py @@ -43,8 +43,8 @@ def test_crumb_capture(sentry_init, capture_events): @pytest.mark.parametrize( "status_code,level", [ - (200, None), - (301, None), + (200, "info"), + (301, "info"), (403, "warning"), (405, "warning"), (500, "error"), @@ -66,12 +66,7 @@ def test_crumb_capture_client_error(sentry_init, 
capture_events, status_code, le (crumb,) = event["breadcrumbs"]["values"] assert crumb["type"] == "http" assert crumb["category"] == "httplib" - - if level is None: - assert "level" not in crumb - else: - assert crumb["level"] == level - + assert crumb["level"] == level assert crumb["data"] == ApproxDict( { "url": url, diff --git a/tests/integrations/stdlib/test_httplib.py b/tests/integrations/stdlib/test_httplib.py index 0b09f8483a..1cad653558 100644 --- a/tests/integrations/stdlib/test_httplib.py +++ b/tests/integrations/stdlib/test_httplib.py @@ -70,8 +70,8 @@ def test_crumb_capture(sentry_init, capture_events): @pytest.mark.parametrize( "status_code,level", [ - (200, None), - (301, None), + (200, "info"), + (301, "info"), (403, "warning"), (405, "warning"), (500, "error"), @@ -94,12 +94,7 @@ def test_crumb_capture_client_error(sentry_init, capture_events, status_code, le assert crumb["type"] == "http" assert crumb["category"] == "httplib" - - if level is None: - assert "level" not in crumb - else: - assert crumb["level"] == level - + assert crumb["level"] == level assert crumb["data"] == ApproxDict( { "url": url, From 29d0819a5d0d9c053e4dafdb7e6574ada3f0327e Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Thu, 27 Feb 2025 14:06:12 +0100 Subject: [PATCH 186/244] Fix breadcrumb level for HTTP client breadcrumbs in POTel (#4104) Oversight from https://github.com/getsentry/sentry-python/pull/4090 -- forgot to update the breadcrumb level created by the async httpx client Makes all Network tests green. --- sentry_sdk/integrations/httpx.py | 1 + 1 file changed, 1 insertion(+) diff --git a/sentry_sdk/integrations/httpx.py b/sentry_sdk/integrations/httpx.py index 4c64f232ef..1ac2708f32 100644 --- a/sentry_sdk/integrations/httpx.py +++ b/sentry_sdk/integrations/httpx.py @@ -174,6 +174,7 @@ async def send(self, request, **kwargs): type="http", category="httplib", data=data, + level=http_client_status_to_breadcrumb_level(rv.status_code), ) return rv From 30e1071e171f7fb968187e6db879eba959e267ae Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Fri, 28 Feb 2025 10:53:59 +0100 Subject: [PATCH 187/244] Port `sample_rate` update to `potel-base` (#4069) If the SDK uses a specific sample rate for a trace, it needs to update it in the DSC for downstream SDKs. On an incoming trace, the DSC's sample_rate is updated if: - an explicit sampling decision is forced, e.g. 
startTransaction(sampled: true) - the tracesSampler is invoked - the tracesSampleRate is used Closes https://github.com/getsentry/sentry-python/issues/4028 --------- Co-authored-by: Anton Pirker --- .../integrations/opentelemetry/sampler.py | 60 ++++- .../integrations/opentelemetry/scope.py | 2 +- tests/test_dsc.py | 219 +++++++++++++++++- 3 files changed, 264 insertions(+), 17 deletions(-) diff --git a/sentry_sdk/integrations/opentelemetry/sampler.py b/sentry_sdk/integrations/opentelemetry/sampler.py index 0b7004dc34..a257f76f1e 100644 --- a/sentry_sdk/integrations/opentelemetry/sampler.py +++ b/sentry_sdk/integrations/opentelemetry/sampler.py @@ -50,9 +50,30 @@ def get_parent_sampled(parent_context, trace_id): return None +def get_parent_sample_rate(parent_context, trace_id): + # type: (Optional[SpanContext], int) -> Optional[float] + if parent_context is None: + return None + + is_span_context_valid = parent_context is not None and parent_context.is_valid + + if is_span_context_valid and parent_context.trace_id == trace_id: + parent_sample_rate = parent_context.trace_state.get(TRACESTATE_SAMPLE_RATE_KEY) + if parent_sample_rate is None: + return None + + try: + return float(parent_sample_rate) + except Exception: + return None + + return None + + def dropped_result(parent_span_context, attributes, sample_rate=None): # type: (SpanContext, Attributes, Optional[float]) -> SamplingResult # these will only be added the first time in a root span sampling decision + # if sample_rate is provided, it'll be updated in trace state trace_state = parent_span_context.trace_state if TRACESTATE_SAMPLED_KEY not in trace_state: @@ -60,8 +81,8 @@ def dropped_result(parent_span_context, attributes, sample_rate=None): elif trace_state.get(TRACESTATE_SAMPLED_KEY) == "deferred": trace_state = trace_state.update(TRACESTATE_SAMPLED_KEY, "false") - if sample_rate and TRACESTATE_SAMPLE_RATE_KEY not in trace_state: - trace_state = trace_state.add(TRACESTATE_SAMPLE_RATE_KEY, str(sample_rate)) + if sample_rate is not None: + trace_state = trace_state.update(TRACESTATE_SAMPLE_RATE_KEY, str(sample_rate)) is_root_span = not ( parent_span_context.is_valid and not parent_span_context.is_remote @@ -88,8 +109,9 @@ def dropped_result(parent_span_context, attributes, sample_rate=None): def sampled_result(span_context, attributes, sample_rate): - # type: (SpanContext, Attributes, float) -> SamplingResult + # type: (SpanContext, Attributes, Optional[float]) -> SamplingResult # these will only be added the first time in a root span sampling decision + # if sample_rate is provided, it'll be updated in trace state trace_state = span_context.trace_state if TRACESTATE_SAMPLED_KEY not in trace_state: @@ -97,8 +119,8 @@ def sampled_result(span_context, attributes, sample_rate): elif trace_state.get(TRACESTATE_SAMPLED_KEY) == "deferred": trace_state = trace_state.update(TRACESTATE_SAMPLED_KEY, "true") - if TRACESTATE_SAMPLE_RATE_KEY not in trace_state: - trace_state = trace_state.add(TRACESTATE_SAMPLE_RATE_KEY, str(sample_rate)) + if sample_rate is not None: + trace_state = trace_state.update(TRACESTATE_SAMPLE_RATE_KEY, str(sample_rate)) return SamplingResult( Decision.RECORD_AND_SAMPLE, @@ -142,9 +164,13 @@ def should_sample( if is_root_span: sample_rate = float(custom_sampled) if sample_rate > 0: - return sampled_result(parent_span_context, attributes, sample_rate) + return sampled_result( + parent_span_context, attributes, sample_rate=sample_rate + ) else: - return dropped_result(parent_span_context, attributes) + return 
dropped_result( + parent_span_context, attributes, sample_rate=sample_rate + ) else: logger.debug( f"[Tracing] Ignoring sampled param for non-root span {name}" @@ -154,19 +180,27 @@ def should_sample( # Traces_sampler is responsible to check parent sampled to have full transactions. has_traces_sampler = callable(client.options.get("traces_sampler")) + sample_rate_to_propagate = None + if is_root_span and has_traces_sampler: sampling_context = create_sampling_context( name, attributes, parent_span_context, trace_id ) sample_rate = client.options["traces_sampler"](sampling_context) + sample_rate_to_propagate = sample_rate else: # Check if there is a parent with a sampling decision parent_sampled = get_parent_sampled(parent_span_context, trace_id) + parent_sample_rate = get_parent_sample_rate(parent_span_context, trace_id) if parent_sampled is not None: - sample_rate = parent_sampled + sample_rate = bool(parent_sampled) + sample_rate_to_propagate = ( + parent_sample_rate if parent_sample_rate else sample_rate + ) else: # Check if there is a traces_sample_rate sample_rate = client.options.get("traces_sample_rate") + sample_rate_to_propagate = sample_rate # If the sample rate is invalid, drop the span if not is_valid_sample_rate(sample_rate, source=self.__class__.__name__): @@ -178,15 +212,21 @@ def should_sample( # Down-sample in case of back pressure monitor says so if is_root_span and client.monitor: sample_rate /= 2**client.monitor.downsample_factor + if client.monitor.downsample_factor > 0: + sample_rate_to_propagate = sample_rate # Roll the dice on sample rate sample_rate = float(cast("Union[bool, float, int]", sample_rate)) sampled = random.random() < sample_rate if sampled: - return sampled_result(parent_span_context, attributes, sample_rate) + return sampled_result( + parent_span_context, attributes, sample_rate=sample_rate_to_propagate + ) else: - return dropped_result(parent_span_context, attributes, sample_rate) + return dropped_result( + parent_span_context, attributes, sample_rate=sample_rate_to_propagate + ) def get_description(self) -> str: return self.__class__.__name__ diff --git a/sentry_sdk/integrations/opentelemetry/scope.py b/sentry_sdk/integrations/opentelemetry/scope.py index c60e5eb716..c04c299e38 100644 --- a/sentry_sdk/integrations/opentelemetry/scope.py +++ b/sentry_sdk/integrations/opentelemetry/scope.py @@ -123,7 +123,7 @@ def _incoming_otel_span_context(self): # for twp to work, we also need to consider deferred sampling when the sampling # flag is not present, so the above TraceFlags are not sufficient if self._propagation_context.parent_sampled is None: - trace_state = trace_state.add(TRACESTATE_SAMPLED_KEY, "deferred") + trace_state = trace_state.update(TRACESTATE_SAMPLED_KEY, "deferred") span_context = SpanContext( trace_id=int(self._propagation_context.trace_id, 16), diff --git a/tests/test_dsc.py b/tests/test_dsc.py index 45d3be6897..9698bcd8d0 100644 --- a/tests/test_dsc.py +++ b/tests/test_dsc.py @@ -8,6 +8,7 @@ This is not tested in this file. 
""" +import random from unittest import mock import pytest @@ -117,7 +118,7 @@ def test_dsc_continuation_of_trace(sentry_init, capture_envelopes): assert "sample_rate" in envelope_trace_header assert type(envelope_trace_header["sample_rate"]) == str - assert envelope_trace_header["sample_rate"] == "1.0" + assert envelope_trace_header["sample_rate"] == "0.01337" assert "sampled" in envelope_trace_header assert type(envelope_trace_header["sampled"]) == str @@ -137,7 +138,7 @@ def test_dsc_continuation_of_trace(sentry_init, capture_envelopes): def test_dsc_continuation_of_trace_sample_rate_changed_in_traces_sampler( - sentry_init, capture_envelopes + sentry_init, capture_envelopes, monkeypatch ): """ Another service calls our service and passes tracing information to us. @@ -175,10 +176,10 @@ def my_traces_sampler(sampling_context): } # We continue the incoming trace and start a new transaction - with mock.patch("sentry_sdk.tracing_utils.Random.uniform", return_value=0.125): - with sentry_sdk.continue_trace(incoming_http_headers): - with sentry_sdk.start_span(name="foo"): - pass + monkeypatch.setattr(random, "random", lambda: 0.125) + with sentry_sdk.continue_trace(incoming_http_headers): + with sentry_sdk.start_span(name="foo"): + pass assert len(envelopes) == 1 @@ -214,6 +215,212 @@ def my_traces_sampler(sampling_context): assert envelope_trace_header["transaction"] == "bar" +@pytest.mark.parametrize( + "test_data, expected_sample_rate, expected_sampled", + [ + # Test data: + # "incoming_sample_rate": + # The "sentry-sample_rate" in the incoming `baggage` header. + # "incoming_sampled": + # The "sentry-sampled" in the incoming `baggage` header. + # "sentry_trace_header_parent_sampled": + # The number at the end in the `sentry-trace` header, called "parent_sampled". + # "use_local_traces_sampler": + # Whether the local traces sampler is used. + # "local_traces_sampler_result": + # The result of the local traces sampler. + # "local_traces_sample_rate": + # The `traces_sample_rate` setting in the local `sentry_init` call. 
+ ( + { + "incoming_sample_rate": 1.0, + "incoming_sampled": "true", + "sentry_trace_header_parent_sampled": 1, + "use_local_traces_sampler": False, + "local_traces_sampler_result": None, + "local_traces_sample_rate": 0.7, + }, + 1.0, # expected_sample_rate + "true", # expected_sampled + ), + ( + { + "incoming_sample_rate": 1.0, + "incoming_sampled": "true", + "sentry_trace_header_parent_sampled": 1, + "use_local_traces_sampler": True, + "local_traces_sampler_result": 0.5, + "local_traces_sample_rate": 0.7, + }, + 0.5, # expected_sample_rate + "true", # expected_sampled + ), + ( + { + "incoming_sample_rate": 1.0, + "incoming_sampled": "false", + "sentry_trace_header_parent_sampled": 0, + "use_local_traces_sampler": False, + "local_traces_sampler_result": None, + "local_traces_sample_rate": 0.7, + }, + None, # expected_sample_rate + "tracing-disabled-no-transactions-should-be-sent", # expected_sampled (because the parent sampled is 0) + ), + ( + { + "incoming_sample_rate": 1.0, + "incoming_sampled": "false", + "sentry_trace_header_parent_sampled": 0, + "use_local_traces_sampler": True, + "local_traces_sampler_result": 0.5, + "local_traces_sample_rate": 0.7, + }, + 0.5, # expected_sample_rate + "false", # expected_sampled (traces sampler can override parent sampled) + ), + ( + { + "incoming_sample_rate": 1.0, + "incoming_sampled": "true", + "sentry_trace_header_parent_sampled": 1, + "use_local_traces_sampler": False, + "local_traces_sampler_result": None, + "local_traces_sample_rate": None, + }, + None, # expected_sample_rate + "tracing-disabled-no-transactions-should-be-sent", # expected_sampled (traces_sample_rate=None disables all transaction creation) + ), + ( + { + "incoming_sample_rate": 1.0, + "incoming_sampled": "true", + "sentry_trace_header_parent_sampled": 1, + "use_local_traces_sampler": True, + "local_traces_sampler_result": 0.5, + "local_traces_sample_rate": None, + }, + 0.5, # expected_sample_rate + "true", # expected_sampled (traces sampler overrides the traces_sample_rate setting, so transactions are created) + ), + ( + { + "incoming_sample_rate": 1.0, + "incoming_sampled": "false", + "sentry_trace_header_parent_sampled": 0, + "use_local_traces_sampler": False, + "local_traces_sampler_result": None, + "local_traces_sample_rate": None, + }, + None, # expected_sample_rate + "tracing-disabled-no-transactions-should-be-sent", # expected_sampled (traces_sample_rate=None disables all transaction creation) + ), + ( + { + "incoming_sample_rate": 1.0, + "incoming_sampled": "false", + "sentry_trace_header_parent_sampled": 0, + "use_local_traces_sampler": True, + "local_traces_sampler_result": 0.5, + "local_traces_sample_rate": None, + }, + 0.5, # expected_sample_rate + "false", # expected_sampled + ), + ( + { + "incoming_sample_rate": 1.0, + "incoming_sampled": None, + "sentry_trace_header_parent_sampled": None, + "use_local_traces_sampler": False, + "local_traces_sampler_result": 0.5, + "local_traces_sample_rate": 0.7, + }, + 0.7, # expected_sample_rate + "true", # expected_sampled + ), + ], + ids=( + "1 traces_sample_rate does not override incoming", + "2 traces_sampler overrides incoming", + "3 traces_sample_rate does not overrides incoming sample rate or parent (incoming not sampled)", + "4 traces_sampler overrides incoming (incoming not sampled)", + "5 forwarding incoming (traces_sample_rate not set)", + "6 traces_sampler overrides incoming (traces_sample_rate not set)", + "7 forwarding incoming (traces_sample_rate not set) (incoming not sampled)", + "8 traces_sampler overrides 
incoming (traces_sample_rate not set) (incoming not sampled)", + "9 traces_sample_rate overrides incoming (upstream deferred sampling decision)", + ), +) +def test_dsc_sample_rate_change( + sentry_init, + capture_envelopes, + test_data, + expected_sample_rate, + expected_sampled, +): + """ + Another service calls our service and passes tracing information to us. + Our service is continuing the trace, but modifies the sample rate. + The DSC in transaction envelopes should contain the updated sample rate. + """ + + def my_traces_sampler(sampling_context): + return test_data["local_traces_sampler_result"] + + init_kwargs = { + "dsn": "https://mysecret@bla.ingest.sentry.io/12312012", + "release": "myapp@0.0.1", + "environment": "canary", + } + + if test_data["local_traces_sample_rate"]: + init_kwargs["traces_sample_rate"] = test_data["local_traces_sample_rate"] + + if test_data["use_local_traces_sampler"]: + init_kwargs["traces_sampler"] = my_traces_sampler + + sentry_init(**init_kwargs) + envelopes = capture_envelopes() + + # This is what the upstream service sends us + incoming_trace_id = "771a43a4192642f0b136d5159a501700" + if test_data["sentry_trace_header_parent_sampled"] is None: + sentry_trace = f"{incoming_trace_id}-1234567890abcdef" + else: + sentry_trace = f"{incoming_trace_id}-1234567890abcdef-{test_data['sentry_trace_header_parent_sampled']}" + + baggage = ( + f"sentry-trace_id={incoming_trace_id}, " + f"sentry-sample_rate={str(test_data['incoming_sample_rate'])}, " + f"sentry-sampled={test_data['incoming_sampled']}, " + "sentry-public_key=frontendpublickey, " + "sentry-release=myapp@0.0.1, " + "sentry-environment=prod, " + "sentry-transaction=foo, " + ) + incoming_http_headers = { + "HTTP_SENTRY_TRACE": sentry_trace, + "HTTP_BAGGAGE": baggage, + } + + # We continue the incoming trace and start a new transaction + with mock.patch.object(random, "random", return_value=0.2): + with sentry_sdk.continue_trace(incoming_http_headers): + with sentry_sdk.start_span(name="foo"): + pass + + if expected_sampled == "tracing-disabled-no-transactions-should-be-sent": + assert len(envelopes) == 0 + else: + transaction_envelope = envelopes[0] + dsc_in_envelope_header = transaction_envelope.headers["trace"] + + assert dsc_in_envelope_header["sample_rate"] == str(expected_sample_rate) + assert dsc_in_envelope_header["sampled"] == str(expected_sampled).lower() + assert dsc_in_envelope_header["trace_id"] == incoming_trace_id + + def test_dsc_issue(sentry_init, capture_envelopes): """ Our service is a standalone service that does not have tracing enabled. Just uses Sentry for error reporting. 
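A minimal sketch of the sample-rate propagation described in the patch above, assuming a service that continues an incoming trace while defining its own traces_sampler (the DSN, headers and sampler name are illustrative placeholders taken from tests/test_dsc.py); when the local sampler decides, its rate is what the outgoing DSC should carry:

import sentry_sdk

def my_traces_sampler(sampling_context):
    # A forced local sampling decision; downstream services should see this
    # value as sentry-sample_rate in the propagated baggage (DSC).
    return 0.25

sentry_sdk.init(
    dsn="https://mysecret@bla.ingest.sentry.io/12312012",
    traces_sampler=my_traces_sampler,
)

incoming_http_headers = {
    "HTTP_SENTRY_TRACE": "771a43a4192642f0b136d5159a501700-1234567890abcdef-1",
    "HTTP_BAGGAGE": (
        "sentry-trace_id=771a43a4192642f0b136d5159a501700, "
        "sentry-sample_rate=1.0, "
        "sentry-sampled=true, "
        "sentry-public_key=frontendpublickey"
    ),
}

# Continue the incoming trace; if the root span is sampled, the "trace"
# envelope header (DSC) of the resulting transaction should report
# sample_rate=0.25 instead of the incoming 1.0.
with sentry_sdk.continue_trace(incoming_http_headers):
    with sentry_sdk.start_span(name="foo"):
        pass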
From 889aec41b793b01de3dc8b7156b2337776230363 Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Thu, 6 Mar 2025 11:58:33 +0100 Subject: [PATCH 188/244] Record number of dropped spans in POTel (#4092) Closes https://github.com/getsentry/sentry-python/issues/4067 Possible follow-up: https://github.com/getsentry/sentry-python/issues/4103 --- .../opentelemetry/span_processor.py | 18 ++++++++++++++---- tests/tracing/test_misc.py | 5 ++--- 2 files changed, 16 insertions(+), 7 deletions(-) diff --git a/sentry_sdk/integrations/opentelemetry/span_processor.py b/sentry_sdk/integrations/opentelemetry/span_processor.py index bf3ff62d1b..c7b3fa30ab 100644 --- a/sentry_sdk/integrations/opentelemetry/span_processor.py +++ b/sentry_sdk/integrations/opentelemetry/span_processor.py @@ -63,6 +63,7 @@ def __init__(self): self._children_spans = defaultdict( list ) # type: DefaultDict[int, List[ReadableSpan]] + self._dropped_spans = defaultdict(lambda: 0) # type: DefaultDict[int, int] def on_start(self, span, parent_context=None): # type: (Span, Optional[Context]) -> None @@ -143,12 +144,17 @@ def _flush_root_span(self, span): if not transaction_event: return + collected_spans, dropped_spans = self._collect_children(span) spans = [] - for child in self._collect_children(span): + for child in collected_spans: span_json = self._span_to_json(child) if span_json: spans.append(span_json) + transaction_event["spans"] = spans + if dropped_spans > 0: + transaction_event["_dropped_spans"] = dropped_spans + # TODO-neel-potel sort and cutoff max spans sentry_sdk.capture_event(transaction_event) @@ -166,25 +172,29 @@ def _append_child_span(self, span): children_spans = self._children_spans[span.parent.span_id] if len(children_spans) < max_spans: children_spans.append(span) + else: + self._dropped_spans[span.parent.span_id] += 1 def _collect_children(self, span): - # type: (ReadableSpan) -> List[ReadableSpan] + # type: (ReadableSpan) -> tuple[List[ReadableSpan], int] if not span.context: - return [] + return [], 0 children = [] + dropped_spans = 0 bfs_queue = deque() # type: Deque[int] bfs_queue.append(span.context.span_id) while bfs_queue: parent_span_id = bfs_queue.popleft() node_children = self._children_spans.pop(parent_span_id, []) + dropped_spans += self._dropped_spans.pop(parent_span_id, 0) children.extend(node_children) bfs_queue.extend( [child.context.span_id for child in node_children if child.context] ) - return children + return children, dropped_spans # we construct the event from scratch here # and not use the current Transaction class for easier refactoring diff --git a/tests/tracing/test_misc.py b/tests/tracing/test_misc.py index 0ff5fa6e7d..a807c6eb74 100644 --- a/tests/tracing/test_misc.py +++ b/tests/tracing/test_misc.py @@ -15,7 +15,7 @@ def test_span_trimming(sentry_init, capture_events): with start_span(name="hi"): for i in range(10): - with start_span(op="foo{}".format(i)): + with start_span(op=f"foo{i}"): pass (event,) = events @@ -29,7 +29,6 @@ def test_span_trimming(sentry_init, capture_events): assert event["_meta"]["spans"][""]["len"] == 10 assert "_dropped_spans" not in event - assert "dropped_spans" not in event def test_span_data_scrubbing_and_trimming(sentry_init, capture_events): @@ -42,7 +41,7 @@ def test_span_data_scrubbing_and_trimming(sentry_init, capture_events): span.set_data("datafoo", "databar") for i in range(10): - with start_span(op="foo{}".format(i)): + with start_span(op=f"foo{i}"): pass (event,) = events From 88e6716cdfe641117fa413b668c43ea53efb33bd Mon Sep 17 00:00:00 2001 
From: Ivana Kellyer Date: Wed, 19 Mar 2025 13:52:10 +0100 Subject: [PATCH 189/244] feat(django): Use `functools.wraps` in more places (#4144) We're not using `@functools.wraps` in a handful of places in the Django integration, leading to the wrapped functions reporting wrong (i.e., Sentry wrapper) names when inspected. Closes https://github.com/getsentry/sentry-python/issues/4138 --------- Co-authored-by: Daniel Szoke <7881302+szokeasaurusrex@users.noreply.github.com> --- sentry_sdk/integrations/django/__init__.py | 3 +++ sentry_sdk/integrations/django/asgi.py | 3 +++ sentry_sdk/integrations/django/signals_handlers.py | 1 + sentry_sdk/integrations/django/views.py | 1 + 4 files changed, 8 insertions(+) diff --git a/sentry_sdk/integrations/django/__init__.py b/sentry_sdk/integrations/django/__init__.py index e8aa673787..f6cbd8c657 100644 --- a/sentry_sdk/integrations/django/__init__.py +++ b/sentry_sdk/integrations/django/__init__.py @@ -1,3 +1,4 @@ +import functools import inspect import sys import threading @@ -321,6 +322,7 @@ def _patch_drf(): else: old_drf_initial = APIView.initial + @functools.wraps(old_drf_initial) def sentry_patched_drf_initial(self, request, *args, **kwargs): # type: (APIView, Any, *Any, **Any) -> Any with capture_internal_exceptions(): @@ -471,6 +473,7 @@ def _patch_get_response(): old_get_response = BaseHandler.get_response + @functools.wraps(old_get_response) def sentry_patched_get_response(self, request): # type: (Any, WSGIRequest) -> Union[HttpResponse, BaseException] _before_get_response(request) diff --git a/sentry_sdk/integrations/django/asgi.py b/sentry_sdk/integrations/django/asgi.py index daa1498c58..0ca1c080fd 100644 --- a/sentry_sdk/integrations/django/asgi.py +++ b/sentry_sdk/integrations/django/asgi.py @@ -88,6 +88,7 @@ def patch_django_asgi_handler_impl(cls): old_app = cls.__call__ + @functools.wraps(old_app) async def sentry_patched_asgi_handler(self, scope, receive, send): # type: (Any, Any, Any, Any) -> Any integration = sentry_sdk.get_client().get_integration(DjangoIntegration) @@ -125,6 +126,7 @@ def patch_get_response_async(cls, _before_get_response): # type: (Any, Any) -> None old_get_response_async = cls.get_response_async + @functools.wraps(old_get_response_async) async def sentry_patched_get_response_async(self, request): # type: (Any, Any) -> Union[HttpResponse, BaseException] _before_get_response(request) @@ -142,6 +144,7 @@ def patch_channels_asgi_handler_impl(cls): if channels.__version__ < "3.0.0": old_app = cls.__call__ + @functools.wraps(old_app) async def sentry_patched_asgi_handler(self, receive, send): # type: (Any, Any, Any) -> Any integration = sentry_sdk.get_client().get_integration(DjangoIntegration) diff --git a/sentry_sdk/integrations/django/signals_handlers.py b/sentry_sdk/integrations/django/signals_handlers.py index ae948cec2a..69c1a3cdfb 100644 --- a/sentry_sdk/integrations/django/signals_handlers.py +++ b/sentry_sdk/integrations/django/signals_handlers.py @@ -50,6 +50,7 @@ def patch_signals(): old_live_receivers = Signal._live_receivers + @wraps(old_live_receivers) def _sentry_live_receivers(self, sender): # type: (Signal, Any) -> Union[tuple[list[Callable[..., Any]], list[Callable[..., Any]]], list[Callable[..., Any]]] if DJANGO_VERSION >= (5, 0): diff --git a/sentry_sdk/integrations/django/views.py b/sentry_sdk/integrations/django/views.py index e8dfa8abb6..aa2140764c 100644 --- a/sentry_sdk/integrations/django/views.py +++ b/sentry_sdk/integrations/django/views.py @@ -31,6 +31,7 @@ def patch_views(): 
old_make_view_atomic = BaseHandler.make_view_atomic old_render = SimpleTemplateResponse.render + @functools.wraps(old_render) def sentry_patched_render(self): # type: (SimpleTemplateResponse) -> Any with sentry_sdk.start_span( From 1ba5c664470ed795badf9f119b3e87c9776bd060 Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Thu, 20 Mar 2025 11:15:38 +0100 Subject: [PATCH 190/244] Fix mypy (#4169) --- sentry_sdk/client.py | 2 +- sentry_sdk/integrations/opentelemetry/contextvars_context.py | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/sentry_sdk/client.py b/sentry_sdk/client.py index 8887a136b2..bce5361572 100644 --- a/sentry_sdk/client.py +++ b/sentry_sdk/client.py @@ -566,7 +566,7 @@ def _prepare_event( if event.get("exception"): DedupeIntegration.reset_last_seen() - event = new_event + event = new_event # type: Optional[Event] # type: ignore[no-redef] before_send_transaction = self.options["before_send_transaction"] if ( diff --git a/sentry_sdk/integrations/opentelemetry/contextvars_context.py b/sentry_sdk/integrations/opentelemetry/contextvars_context.py index df818bc399..7e61a45bc5 100644 --- a/sentry_sdk/integrations/opentelemetry/contextvars_context.py +++ b/sentry_sdk/integrations/opentelemetry/contextvars_context.py @@ -14,12 +14,13 @@ if TYPE_CHECKING: from typing import Optional + from contextvars import Token import sentry_sdk.integrations.opentelemetry.scope as scope class SentryContextVarsRuntimeContext(ContextVarsRuntimeContext): def attach(self, context): - # type: (Context) -> object + # type: (Context) -> Token[Context] scopes = get_value(SENTRY_SCOPES_KEY, context) should_fork_isolation_scope = context.pop( From bd17c62946978a2d7d4abeb6bbb57e4869b00b64 Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Thu, 20 Mar 2025 11:36:58 +0100 Subject: [PATCH 191/244] Remove start/stop_profile_session in favor of start/stop_profiler (#4170) Closes https://github.com/getsentry/sentry-python/issues/4163 --- sentry_sdk/profiler/__init__.py | 4 - sentry_sdk/profiler/continuous_profiler.py | 14 ---- tests/profiler/test_continuous_profiler.py | 86 ++-------------------- 3 files changed, 8 insertions(+), 96 deletions(-) diff --git a/sentry_sdk/profiler/__init__.py b/sentry_sdk/profiler/__init__.py index 0bc63e3a6d..c146dd3a97 100644 --- a/sentry_sdk/profiler/__init__.py +++ b/sentry_sdk/profiler/__init__.py @@ -1,7 +1,5 @@ from sentry_sdk.profiler.continuous_profiler import ( - start_profile_session, start_profiler, - stop_profile_session, stop_profiler, ) from sentry_sdk.profiler.transaction_profiler import ( @@ -25,9 +23,7 @@ ) __all__ = [ - "start_profile_session", # TODO: Deprecate this in favor of `start_profiler` "start_profiler", - "stop_profile_session", # TODO: Deprecate this in favor of `stop_profiler` "stop_profiler", # DEPRECATED: The following was re-exported for backwards compatibility. It # will be removed from sentry_sdk.profiler in a future release. 
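As a usage sketch of the API that remains after this removal (mirroring the updated tests in tests/profiler/test_continuous_profiler.py, and assuming the SDK has already been initialized with continuous profiling configured), manual control of the profiler now goes only through start_profiler/stop_profiler; the *_profile_session aliases are gone:

import sentry_sdk
from sentry_sdk.profiler import start_profiler, stop_profiler

# Manually start the continuous profiler, do some traced work, then stop it.
start_profiler()

with sentry_sdk.start_span(name="profiling"):
    with sentry_sdk.start_span(op="op"):
        pass

stop_profiler()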
diff --git a/sentry_sdk/profiler/continuous_profiler.py b/sentry_sdk/profiler/continuous_profiler.py index 47f63d8f59..1619925bd2 100644 --- a/sentry_sdk/profiler/continuous_profiler.py +++ b/sentry_sdk/profiler/continuous_profiler.py @@ -151,13 +151,6 @@ def start_profiler(): _scheduler.manual_start() -def start_profile_session(): - # type: () -> None - - # TODO: deprecate this as it'll be replaced by `start_profiler` - start_profiler() - - def stop_profiler(): # type: () -> None if _scheduler is None: @@ -166,13 +159,6 @@ def stop_profiler(): _scheduler.manual_stop() -def stop_profile_session(): - # type: () -> None - - # TODO: deprecate this as it'll be replaced by `stop_profiler` - stop_profiler() - - def teardown_continuous_profiler(): # type: () -> None stop_profiler() diff --git a/tests/profiler/test_continuous_profiler.py b/tests/profiler/test_continuous_profiler.py index 7f1ede0bd1..fa55b0be5f 100644 --- a/tests/profiler/test_continuous_profiler.py +++ b/tests/profiler/test_continuous_profiler.py @@ -11,9 +11,7 @@ get_profiler_id, setup_continuous_profiler, start_profiler, - start_profile_session, stop_profiler, - stop_profile_session, ) from tests.conftest import ApproxDict @@ -209,21 +207,6 @@ def assert_single_transaction_without_profile_chunks(envelopes): pytest.param("gevent", marks=requires_gevent), ], ) -@pytest.mark.parametrize( - ["start_profiler_func", "stop_profiler_func"], - [ - pytest.param( - start_profile_session, - stop_profile_session, - id="start_profile_session/stop_profile_session", - ), - pytest.param( - start_profiler, - stop_profiler, - id="start_profiler/stop_profiler (deprecated)", - ), - ], -) @pytest.mark.parametrize( "make_options", [ @@ -236,8 +219,6 @@ def test_continuous_profiler_auto_start_and_manual_stop( sentry_init, capture_envelopes, mode, - start_profiler_func, - stop_profiler_func, make_options, teardown_profiling, ): @@ -258,7 +239,7 @@ def test_continuous_profiler_auto_start_and_manual_stop( assert_single_transaction_with_profile_chunks(envelopes, thread) for _ in range(3): - stop_profiler_func() + stop_profiler() envelopes.clear() @@ -268,7 +249,7 @@ def test_continuous_profiler_auto_start_and_manual_stop( assert_single_transaction_without_profile_chunks(envelopes) - start_profiler_func() + start_profiler() envelopes.clear() @@ -286,21 +267,6 @@ def test_continuous_profiler_auto_start_and_manual_stop( pytest.param("gevent", marks=requires_gevent), ], ) -@pytest.mark.parametrize( - ["start_profiler_func", "stop_profiler_func"], - [ - pytest.param( - start_profile_session, - stop_profile_session, - id="start_profile_session/stop_profile_session", - ), - pytest.param( - start_profiler, - stop_profiler, - id="start_profiler/stop_profiler (deprecated)", - ), - ], -) @pytest.mark.parametrize( "make_options", [ @@ -313,8 +279,6 @@ def test_continuous_profiler_manual_start_and_stop_sampled( sentry_init, capture_envelopes, mode, - start_profiler_func, - stop_profiler_func, make_options, teardown_profiling, ): @@ -331,7 +295,7 @@ def test_continuous_profiler_manual_start_and_stop_sampled( thread = threading.current_thread() for _ in range(3): - start_profiler_func() + start_profiler() envelopes.clear() @@ -345,7 +309,7 @@ def test_continuous_profiler_manual_start_and_stop_sampled( assert get_profiler_id() is not None, "profiler should be running" - stop_profiler_func() + stop_profiler() # the profiler stops immediately in manual mode assert get_profiler_id() is None, "profiler should not be running" @@ -368,21 +332,6 @@ def 
test_continuous_profiler_manual_start_and_stop_sampled( pytest.param("gevent", marks=requires_gevent), ], ) -@pytest.mark.parametrize( - ["start_profiler_func", "stop_profiler_func"], - [ - pytest.param( - start_profile_session, - stop_profile_session, - id="start_profile_session/stop_profile_session", - ), - pytest.param( - start_profiler, - stop_profiler, - id="start_profiler/stop_profiler (deprecated)", - ), - ], -) @pytest.mark.parametrize( "make_options", [ @@ -394,8 +343,6 @@ def test_continuous_profiler_manual_start_and_stop_unsampled( sentry_init, capture_envelopes, mode, - start_profiler_func, - stop_profiler_func, make_options, teardown_profiling, ): @@ -409,7 +356,7 @@ def test_continuous_profiler_manual_start_and_stop_unsampled( envelopes = capture_envelopes() - start_profiler_func() + start_profiler() with sentry_sdk.start_span(name="profiling"): with sentry_sdk.start_span(op="op"): @@ -417,7 +364,7 @@ def test_continuous_profiler_manual_start_and_stop_unsampled( assert_single_transaction_without_profile_chunks(envelopes) - stop_profiler_func() + stop_profiler() @pytest.mark.parametrize( @@ -538,21 +485,6 @@ def test_continuous_profiler_auto_start_and_stop_unsampled( ), ], ) -@pytest.mark.parametrize( - ["start_profiler_func", "stop_profiler_func"], - [ - pytest.param( - start_profile_session, - stop_profile_session, - id="start_profile_session/stop_profile_session", - ), - pytest.param( - start_profiler, - stop_profiler, - id="start_profiler/stop_profiler (deprecated)", - ), - ], -) @pytest.mark.parametrize( "make_options", [ @@ -563,8 +495,6 @@ def test_continuous_profiler_auto_start_and_stop_unsampled( def test_continuous_profiler_manual_start_and_stop_noop_when_using_trace_lifecyle( sentry_init, mode, - start_profiler_func, - stop_profiler_func, class_name, make_options, teardown_profiling, @@ -580,11 +510,11 @@ def test_continuous_profiler_manual_start_and_stop_noop_when_using_trace_lifecyl with mock.patch( f"sentry_sdk.profiler.continuous_profiler.{class_name}.ensure_running" ) as mock_ensure_running: - start_profiler_func() + start_profiler() mock_ensure_running.assert_not_called() with mock.patch( f"sentry_sdk.profiler.continuous_profiler.{class_name}.teardown" ) as mock_teardown: - stop_profiler_func() + stop_profiler() mock_teardown.assert_not_called() From d44889f79d6b276d5c36f972388a014faceae5f7 Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Thu, 20 Mar 2025 12:12:17 +0100 Subject: [PATCH 192/244] Rename some transactions to spans --- tests/test_logs.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/tests/test_logs.py b/tests/test_logs.py index 173a4028d6..7f9618912d 100644 --- a/tests/test_logs.py +++ b/tests/test_logs.py @@ -206,20 +206,20 @@ def test_logs_message_params(sentry_init, capture_envelopes): @minimum_python_37 -def test_logs_tied_to_transactions(sentry_init, capture_envelopes): +def test_logs_tied_to_root_spans(sentry_init, capture_envelopes): """ - Log messages are also tied to transactions. + Log messages are also tied to root spans. 
""" sentry_init(_experiments={"enable_sentry_logs": True}) envelopes = capture_envelopes() - with sentry_sdk.start_transaction(name="test-transaction") as trx: - sentry_logger.warn("This is a log tied to a transaction") + with sentry_sdk.start_span(name="test-root-span") as root_span: + sentry_logger.warn("This is a log tied to a root span.") log_entry = envelopes[0].items[0].payload.json assert log_entry["attributes"][-1] == { "key": "sentry.trace.parent_span_id", - "value": {"stringValue": trx.span_id}, + "value": {"stringValue": root_span.span_id}, } @@ -231,7 +231,7 @@ def test_logs_tied_to_spans(sentry_init, capture_envelopes): sentry_init(_experiments={"enable_sentry_logs": True}) envelopes = capture_envelopes() - with sentry_sdk.start_transaction(name="test-transaction"): + with sentry_sdk.start_span(name="test-root-span"): with sentry_sdk.start_span(description="test-span") as span: sentry_logger.warn("This is a log tied to a span") From 2753face06b02b0aad28e53d73e87ef3b9616cb2 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Thu, 20 Mar 2025 15:55:28 +0100 Subject: [PATCH 193/244] Better handling of exception groups (#4164) Properly handle grouped and chained exceptions. The test case in the linked issue illustrates that some ExceptionGroups have been handled in a wrong way. Updated some tests, because now that those are handled correctly all the mechanism types except for the root exception are set to "chained" like described in the RFC: https://github.com/getsentry/rfcs/blob/main/text/0079-exception-groups.md#interpretation Because this will change the grouping of exiting Sentry Issues containing ExceptionGroups, it is safer to release this fix in the next major and make sure that we describe the change in behavior in the changelog. (Note: The grouping in the Ariadne issues will not change because those are not ExceptionGroups and only updating the `mechanism.type` does not change the grouping) Fixes #3913 --- MIGRATION_GUIDE.md | 1 + sentry_sdk/utils.py | 145 +++++++++--------- tests/integrations/ariadne/test_ariadne.py | 18 ++- .../strawberry/test_strawberry.py | 9 +- tests/test_exceptiongroup.py | 13 +- 5 files changed, 102 insertions(+), 84 deletions(-) diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index 7a1275b852..263e27e111 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -22,6 +22,7 @@ Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of wh - clickhouse-driver integration: The query is now available under the `db.query.text` span attribute (only if `send_default_pii` is `True`). - `sentry_sdk.init` now returns `None` instead of a context manager. - The `sampling_context` argument of `traces_sampler` and `profiles_sampler` now additionally contains all span attributes known at span start. +- We updated how we handle `ExceptionGroup`s. You will now get more data if ExceptionGroups are appearing in chained exceptions. It could happen that after updating the SDK the grouping of issues change because of this. So eventually you will see the same exception in two Sentry issues (one from before the update, one from after the update) - The integration-specific content of the `sampling_context` argument of `traces_sampler` and `profiles_sampler` now looks different. - The Celery integration doesn't add the `celery_job` dictionary anymore. 
Instead, the individual keys are now available as: diff --git a/sentry_sdk/utils.py b/sentry_sdk/utils.py index 22bb09c242..2e6d82d0ae 100644 --- a/sentry_sdk/utils.py +++ b/sentry_sdk/utils.py @@ -775,14 +775,17 @@ def exceptions_from_error( ): # type: (...) -> Tuple[int, List[Dict[str, Any]]] """ - Creates the list of exceptions. - This can include chained exceptions and exceptions from an ExceptionGroup. - - See the Exception Interface documentation for more details: - https://develop.sentry.dev/sdk/event-payloads/exception/ + Converts the given exception information into the Sentry structured "exception" format. + This will return a list of exceptions (a flattened tree of exceptions) in the + format of the Exception Interface documentation: + https://develop.sentry.dev/sdk/data-model/event-payloads/exception/ + + This function can handle: + - simple exceptions + - chained exceptions (raise .. from ..) + - exception groups """ - - parent = single_exception_from_error_tuple( + base_exception = single_exception_from_error_tuple( exc_type=exc_type, exc_value=exc_value, tb=tb, @@ -793,64 +796,63 @@ def exceptions_from_error( source=source, full_stack=full_stack, ) - exceptions = [parent] + exceptions = [base_exception] parent_id = exception_id exception_id += 1 - should_supress_context = hasattr(exc_value, "__suppress_context__") and exc_value.__suppress_context__ # type: ignore - if should_supress_context: - # Add direct cause. - # The field `__cause__` is set when raised with the exception (using the `from` keyword). - exception_has_cause = ( + causing_exception = None + exception_source = None + + # Add any causing exceptions, if present. + should_suppress_context = hasattr(exc_value, "__suppress_context__") and exc_value.__suppress_context__ # type: ignore + # Note: __suppress_context__ is True if the exception is raised with the `from` keyword. + if should_suppress_context: + # Explicitly chained exceptions (Like: raise NewException() from OriginalException()) + # The field `__cause__` is set to OriginalException + has_explicit_causing_exception = ( exc_value and hasattr(exc_value, "__cause__") and exc_value.__cause__ is not None ) - if exception_has_cause: - cause = exc_value.__cause__ # type: ignore - (exception_id, child_exceptions) = exceptions_from_error( - exc_type=type(cause), - exc_value=cause, - tb=getattr(cause, "__traceback__", None), - client_options=client_options, - mechanism=mechanism, - exception_id=exception_id, - source="__cause__", - full_stack=full_stack, - ) - exceptions.extend(child_exceptions) - + if has_explicit_causing_exception: + exception_source = "__cause__" + causing_exception = exc_value.__cause__ # type: ignore else: - # Add indirect cause. - # The field `__context__` is assigned if another exception occurs while handling the exception. - exception_has_content = ( + # Implicitly chained exceptions (when an exception occurs while handling another exception) + # The field `__context__` is set in the exception that occurs while handling another exception, + # to the other exception. 
+ has_implicit_causing_exception = ( exc_value and hasattr(exc_value, "__context__") and exc_value.__context__ is not None ) - if exception_has_content: - context = exc_value.__context__ # type: ignore - (exception_id, child_exceptions) = exceptions_from_error( - exc_type=type(context), - exc_value=context, - tb=getattr(context, "__traceback__", None), - client_options=client_options, - mechanism=mechanism, - exception_id=exception_id, - source="__context__", - full_stack=full_stack, - ) - exceptions.extend(child_exceptions) + if has_implicit_causing_exception: + exception_source = "__context__" + causing_exception = exc_value.__context__ # type: ignore + + if causing_exception: + (exception_id, child_exceptions) = exceptions_from_error( + exc_type=type(causing_exception), + exc_value=causing_exception, + tb=getattr(causing_exception, "__traceback__", None), + client_options=client_options, + mechanism=mechanism, + exception_id=exception_id, + parent_id=parent_id, + source=exception_source, + full_stack=full_stack, + ) + exceptions.extend(child_exceptions) - # Add exceptions from an ExceptionGroup. + # Add child exceptions from an ExceptionGroup. is_exception_group = exc_value and hasattr(exc_value, "exceptions") if is_exception_group: - for idx, e in enumerate(exc_value.exceptions): # type: ignore + for idx, causing_exception in enumerate(exc_value.exceptions): # type: ignore (exception_id, child_exceptions) = exceptions_from_error( - exc_type=type(e), - exc_value=e, - tb=getattr(e, "__traceback__", None), + exc_type=type(causing_exception), + exc_value=causing_exception, + tb=getattr(causing_exception, "__traceback__", None), client_options=client_options, mechanism=mechanism, exception_id=exception_id, @@ -870,38 +872,29 @@ def exceptions_from_error_tuple( full_stack=None, # type: Optional[list[dict[str, Any]]] ): # type: (...) -> List[Dict[str, Any]] + """ + Convert Python's exception information into Sentry's structured "exception" format in the event. + See https://develop.sentry.dev/sdk/data-model/event-payloads/exception/ + This is the entry point for the exception handling. 
+ """ + # unpack the exception info tuple exc_type, exc_value, tb = exc_info - is_exception_group = BaseExceptionGroup is not None and isinstance( - exc_value, BaseExceptionGroup + # let exceptions_from_error do the actual work + _, exceptions = exceptions_from_error( + exc_type=exc_type, + exc_value=exc_value, + tb=tb, + client_options=client_options, + mechanism=mechanism, + exception_id=0, + parent_id=0, + full_stack=full_stack, ) - if is_exception_group: - (_, exceptions) = exceptions_from_error( - exc_type=exc_type, - exc_value=exc_value, - tb=tb, - client_options=client_options, - mechanism=mechanism, - exception_id=0, - parent_id=0, - full_stack=full_stack, - ) - - else: - exceptions = [] - for exc_type, exc_value, tb in walk_exception_chain(exc_info): - exceptions.append( - single_exception_from_error_tuple( - exc_type=exc_type, - exc_value=exc_value, - tb=tb, - client_options=client_options, - mechanism=mechanism, - full_stack=full_stack, - ) - ) - + # make sure the exceptions are sorted + # from the innermost (oldest) + # to the outermost (newest) exception exceptions.reverse() return exceptions diff --git a/tests/integrations/ariadne/test_ariadne.py b/tests/integrations/ariadne/test_ariadne.py index 2c3b086aa5..6637a88451 100644 --- a/tests/integrations/ariadne/test_ariadne.py +++ b/tests/integrations/ariadne/test_ariadne.py @@ -68,7 +68,9 @@ def test_capture_request_and_response_if_send_pii_is_on_async( assert len(events) == 1 (event,) = events - assert event["exception"]["values"][0]["mechanism"]["type"] == "ariadne" + assert len(event["exception"]["values"]) == 2 + assert event["exception"]["values"][0]["mechanism"]["type"] == "chained" + assert event["exception"]["values"][-1]["mechanism"]["type"] == "ariadne" assert event["contexts"]["response"] == { "data": { "data": {"error": None}, @@ -111,7 +113,10 @@ def graphql_server(): assert len(events) == 1 (event,) = events - assert event["exception"]["values"][0]["mechanism"]["type"] == "ariadne" + assert len(event["exception"]["values"]) == 2 + assert event["exception"]["values"][0]["mechanism"]["type"] == "chained" + assert event["exception"]["values"][-1]["mechanism"]["type"] == "ariadne" + assert event["contexts"]["response"] == { "data": { "data": {"error": None}, @@ -152,7 +157,10 @@ def test_do_not_capture_request_and_response_if_send_pii_is_off_async( assert len(events) == 1 (event,) = events - assert event["exception"]["values"][0]["mechanism"]["type"] == "ariadne" + assert len(event["exception"]["values"]) == 2 + assert event["exception"]["values"][0]["mechanism"]["type"] == "chained" + assert event["exception"]["values"][-1]["mechanism"]["type"] == "ariadne" + assert "data" not in event["request"] assert "response" not in event["contexts"] @@ -182,7 +190,9 @@ def graphql_server(): assert len(events) == 1 (event,) = events - assert event["exception"]["values"][0]["mechanism"]["type"] == "ariadne" + assert len(event["exception"]["values"]) == 2 + assert event["exception"]["values"][0]["mechanism"]["type"] == "chained" + assert event["exception"]["values"][-1]["mechanism"]["type"] == "ariadne" assert "data" not in event["request"] assert "response" not in event["contexts"] diff --git a/tests/integrations/strawberry/test_strawberry.py b/tests/integrations/strawberry/test_strawberry.py index fdf7ff71bb..d1774aeca5 100644 --- a/tests/integrations/strawberry/test_strawberry.py +++ b/tests/integrations/strawberry/test_strawberry.py @@ -204,7 +204,9 @@ def test_capture_request_if_available_and_send_pii_is_on( (error_event,) = 
events - assert error_event["exception"]["values"][0]["mechanism"]["type"] == "strawberry" + assert len(error_event["exception"]["values"]) == 2 + assert error_event["exception"]["values"][0]["mechanism"]["type"] == "chained" + assert error_event["exception"]["values"][-1]["mechanism"]["type"] == "strawberry" assert error_event["request"]["api_target"] == "graphql" assert error_event["request"]["data"] == { "query": query, @@ -258,7 +260,10 @@ def test_do_not_capture_request_if_send_pii_is_off( assert len(events) == 1 (error_event,) = events - assert error_event["exception"]["values"][0]["mechanism"]["type"] == "strawberry" + + assert len(error_event["exception"]["values"]) == 2 + assert error_event["exception"]["values"][0]["mechanism"]["type"] == "chained" + assert error_event["exception"]["values"][-1]["mechanism"]["type"] == "strawberry" assert "data" not in error_event["request"] assert "response" not in error_event["contexts"] diff --git a/tests/test_exceptiongroup.py b/tests/test_exceptiongroup.py index 4c7afc58eb..01ec0a78d4 100644 --- a/tests/test_exceptiongroup.py +++ b/tests/test_exceptiongroup.py @@ -217,7 +217,10 @@ def test_exception_chain_cause(): { "mechanism": { "handled": False, - "type": "test_suite", + "type": "chained", + "exception_id": 1, + "parent_id": 0, + "source": "__cause__", }, "module": None, "type": "TypeError", @@ -227,6 +230,7 @@ def test_exception_chain_cause(): "mechanism": { "handled": False, "type": "test_suite", + "exception_id": 0, }, "module": None, "type": "ValueError", @@ -257,7 +261,10 @@ def test_exception_chain_context(): { "mechanism": { "handled": False, - "type": "test_suite", + "type": "chained", + "exception_id": 1, + "parent_id": 0, + "source": "__context__", }, "module": None, "type": "TypeError", @@ -267,6 +274,7 @@ def test_exception_chain_context(): "mechanism": { "handled": False, "type": "test_suite", + "exception_id": 0, }, "module": None, "type": "ValueError", @@ -297,6 +305,7 @@ def test_simple_exception(): "mechanism": { "handled": False, "type": "test_suite", + "exception_id": 0, }, "module": None, "type": "ValueError", From 0cd2bcea97d7dc7b6a4fc199dce11842868a508b Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Fri, 21 Mar 2025 09:03:27 +0100 Subject: [PATCH 194/244] chore: Drop more deprecated stuff (#4176) Remove the following deprecated stuff: - `configure_debug_hub` from `debug.py` - exported stuff in `profiler/__init__.py` that's not part of public API and was only exported for backwards compat reasons - support for `_experiments['profiler_mode']` and `_experiments['profiles_sample_rate']` which both have non-experimental top-level options now - `Transport.capture_event` - `_FunctionTransport` and support for function transports in general - `enable_tracing` --------- Co-authored-by: Anton Pirker --- MIGRATION_GUIDE.md | 9 ++- sentry_sdk/client.py | 11 ---- sentry_sdk/consts.py | 4 -- sentry_sdk/debug.py | 10 --- sentry_sdk/profiler/__init__.py | 36 ----------- sentry_sdk/profiler/continuous_profiler.py | 8 +-- sentry_sdk/profiler/transaction_profiler.py | 22 +------ sentry_sdk/tracing_utils.py | 9 +-- sentry_sdk/transport.py | 59 +---------------- tests/conftest.py | 2 +- tests/integrations/celery/test_celery.py | 20 +++--- .../celery/test_update_celery_task_headers.py | 40 +----------- tests/integrations/graphene/test_graphene.py | 2 +- .../sqlalchemy/test_sqlalchemy.py | 24 ++++--- tests/profiler/test_transaction_profiler.py | 64 ++++--------------- tests/test_basics.py | 33 +--------- tests/test_client.py | 6 -- 
tests/test_scope.py | 6 +- 18 files changed, 58 insertions(+), 307 deletions(-) diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index 263e27e111..d57696d910 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -17,6 +17,8 @@ Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of wh - `Span.finish()` no longer returns the `event_id` if the event is sent to sentry. - The `Profile()` constructor does not accept a `hub` parameter anymore. - A `Profile` object does not have a `.hub` property anymore. +- `MAX_PROFILE_DURATION_NS`, `PROFILE_MINIMUM_SAMPLES`, `Profile`, `Scheduler`, `ThreadScheduler`, `GeventScheduler`, `has_profiling_enabled`, `setup_profiler`, `teardown_profiler` are no longer accessible from `sentry_sdk.profiler`. They're still accessible from `sentry_sdk.profiler.transaction_profiler`. +- `DEFAULT_SAMPLING_FREQUENCY`, `MAX_STACK_DEPTH`, `get_frame_name`, `extract_frame`, `extract_stack`, `frame_id` are no longer accessible from `sentry_sdk.profiler`. They're still accessible from `sentry_sdk.profiler.utils`. - `sentry_sdk.continue_trace` no longer returns a `Transaction` and is now a context manager. - Redis integration: In Redis pipeline spans there is no `span["data"]["redis.commands"]` that contains a dict `{"count": 3, "first_ten": ["cmd1", "cmd2", ...]}` but instead `span["data"]["redis.commands.count"]` (containing `3`) and `span["data"]["redis.commands.first_ten"]` (containing `["cmd1", "cmd2", ...]`). - clickhouse-driver integration: The query is now available under the `db.query.text` span attribute (only if `send_default_pii` is `True`). @@ -131,6 +133,7 @@ Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of wh - Spans no longer have a `description`. Use `name` instead. - Dropped support for Python 3.6. +- The `enable_tracing` `init` option has been removed. Configure `traces_sample_rate` directly. - The `custom_sampling_context` parameter of `start_transaction` has been removed. Use `attributes` instead to set key-value pairs of data that should be accessible in the traces sampler. Note that span attributes need to conform to the [OpenTelemetry specification](https://opentelemetry.io/docs/concepts/signals/traces/#attributes), meaning only certain types can be set as values. - The PyMongo integration no longer sets tags. The data is still accessible via span attributes. - The PyMongo integration doesn't set `operation_ids` anymore. The individual IDs (`operation_id`, `request_id`, `session_id`) are now accessible as separate span attributes. @@ -142,13 +145,17 @@ Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of wh - The context manager `auto_session_tracking()` has been removed. Use `track_session()` instead. - The context manager `auto_session_tracking_scope()` has been removed. Use `track_session()` instead. - Utility function `is_auto_session_tracking_enabled()` has been removed. There is no public replacement. There is a private `_is_auto_session_tracking_enabled()` (if you absolutely need this function) It accepts a `scope` parameter instead of the previously used `hub` parameter. -- Utility function `is_auto_session_tracking_enabled_scope()` has been removed. There is no public replacement. There is a private `_is_auto_session_tracking_enabled()` (if you absolutely need this function) +- Utility function `is_auto_session_tracking_enabled_scope()` has been removed. There is no public replacement. 
There is a private `_is_auto_session_tracking_enabled()` (if you absolutely need this function). - Setting `scope.level` has been removed. Use `scope.set_level` instead. - `span.containing_transaction` has been removed. Use `span.root_span` instead. - `continue_from_headers`, `continue_from_environ` and `from_traceparent` have been removed, please use top-level API `sentry_sdk.continue_trace` instead. - `PropagationContext` constructor no longer takes a `dynamic_sampling_context` but takes a `baggage` object instead. - `ThreadingIntegration` no longer takes the `propagate_hub` argument. - `Baggage.populate_from_transaction` has been removed. +- `debug.configure_debug_hub` was removed. +- `profiles_sample_rate` and `profiler_mode` were removed from options available via `_experiments`. Use the top-level `profiles_sample_rate` and `profiler_mode` options instead. +- `Transport.capture_event` has been removed. Use `Transport.capture_envelope` instead. +- Function transports are no longer supported. Subclass the `Transport` instead. ### Deprecated diff --git a/sentry_sdk/client.py b/sentry_sdk/client.py index bce5361572..059a752451 100644 --- a/sentry_sdk/client.py +++ b/sentry_sdk/client.py @@ -9,7 +9,6 @@ from datetime import datetime, timezone from importlib import import_module from typing import TYPE_CHECKING, List, Dict, cast, overload -import warnings from sentry_sdk._compat import check_uwsgi_thread_support from sentry_sdk.utils import ( @@ -121,9 +120,6 @@ def _get_options(*args, **kwargs): rv["project_root"] = project_root - if rv["enable_tracing"] is True and rv["traces_sample_rate"] is None: - rv["traces_sample_rate"] = 1.0 - if rv["event_scrubber"] is None: rv["event_scrubber"] = EventScrubber( send_default_pii=( @@ -137,13 +133,6 @@ def _get_options(*args, **kwargs): ) rv["socket_options"] = None - if rv["enable_tracing"] is not None: - warnings.warn( - "The `enable_tracing` parameter is deprecated. Please use `traces_sample_rate` instead.", - DeprecationWarning, - stacklevel=2, - ) - return rv diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index eee70006fe..7841c4d9b4 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -526,7 +526,6 @@ def __init__( proxy_headers=None, # type: Optional[Dict[str, str]] before_send_transaction=None, # type: Optional[TransactionProcessor] project_root=None, # type: Optional[str] - enable_tracing=None, # type: Optional[bool] include_local_variables=True, # type: Optional[bool] include_source_context=True, # type: Optional[bool] trace_propagation_targets=[ # noqa: B006 @@ -915,9 +914,6 @@ def __init__( :param profile_session_sample_rate: - - :param enable_tracing: - :param propagate_traces: :param auto_session_tracking: diff --git a/sentry_sdk/debug.py b/sentry_sdk/debug.py index f740d92dec..32b36a4048 100644 --- a/sentry_sdk/debug.py +++ b/sentry_sdk/debug.py @@ -1,6 +1,5 @@ import sys import logging -import warnings from sentry_sdk import get_client from sentry_sdk.client import _client_init_debug @@ -30,12 +29,3 @@ def configure_logger(): logger.addHandler(_handler) logger.setLevel(logging.DEBUG) logger.addFilter(_DebugFilter()) - - -def configure_debug_hub(): - # type: () -> None - warnings.warn( - "configure_debug_hub is deprecated. 
Please remove calls to it, as it is a no-op.", - DeprecationWarning, - stacklevel=2, - ) diff --git a/sentry_sdk/profiler/__init__.py b/sentry_sdk/profiler/__init__.py index c146dd3a97..762bd4d9cf 100644 --- a/sentry_sdk/profiler/__init__.py +++ b/sentry_sdk/profiler/__init__.py @@ -2,44 +2,8 @@ start_profiler, stop_profiler, ) -from sentry_sdk.profiler.transaction_profiler import ( - MAX_PROFILE_DURATION_NS, - PROFILE_MINIMUM_SAMPLES, - Profile, - Scheduler, - ThreadScheduler, - GeventScheduler, - has_profiling_enabled, - setup_profiler, - teardown_profiler, -) -from sentry_sdk.profiler.utils import ( - DEFAULT_SAMPLING_FREQUENCY, - MAX_STACK_DEPTH, - get_frame_name, - extract_frame, - extract_stack, - frame_id, -) __all__ = [ "start_profiler", "stop_profiler", - # DEPRECATED: The following was re-exported for backwards compatibility. It - # will be removed from sentry_sdk.profiler in a future release. - "MAX_PROFILE_DURATION_NS", - "PROFILE_MINIMUM_SAMPLES", - "Profile", - "Scheduler", - "ThreadScheduler", - "GeventScheduler", - "has_profiling_enabled", - "setup_profiler", - "teardown_profiler", - "DEFAULT_SAMPLING_FREQUENCY", - "MAX_STACK_DEPTH", - "get_frame_name", - "extract_frame", - "extract_stack", - "frame_id", ] diff --git a/sentry_sdk/profiler/continuous_profiler.py b/sentry_sdk/profiler/continuous_profiler.py index 1619925bd2..371f61c632 100644 --- a/sentry_sdk/profiler/continuous_profiler.py +++ b/sentry_sdk/profiler/continuous_profiler.py @@ -87,15 +87,9 @@ def setup_continuous_profiler(options, sdk_info, capture_func): else: default_profiler_mode = ThreadContinuousScheduler.mode + profiler_mode = default_profiler_mode if options.get("profiler_mode") is not None: profiler_mode = options["profiler_mode"] - else: - # TODO: deprecate this and just use the existing `profiler_mode` - experiments = options.get("_experiments", {}) - - profiler_mode = ( - experiments.get("continuous_profiling_mode") or default_profiler_mode - ) frequency = DEFAULT_SAMPLING_FREQUENCY diff --git a/sentry_sdk/profiler/transaction_profiler.py b/sentry_sdk/profiler/transaction_profiler.py index 626bcabb52..095ce2f2f9 100644 --- a/sentry_sdk/profiler/transaction_profiler.py +++ b/sentry_sdk/profiler/transaction_profiler.py @@ -125,16 +125,6 @@ def has_profiling_enabled(options): if profiles_sample_rate is not None and profiles_sample_rate > 0: return True - profiles_sample_rate = options["_experiments"].get("profiles_sample_rate") - if profiles_sample_rate is not None: - logger.warning( - "_experiments['profiles_sample_rate'] is deprecated. " - "Please use the non-experimental profiles_sample_rate option " - "directly." - ) - if profiles_sample_rate > 0: - return True - return False @@ -157,16 +147,9 @@ def setup_profiler(options): else: default_profiler_mode = ThreadScheduler.mode + profiler_mode = default_profiler_mode if options.get("profiler_mode") is not None: profiler_mode = options["profiler_mode"] - else: - profiler_mode = options.get("_experiments", {}).get("profiler_mode") - if profiler_mode is not None: - logger.warning( - "_experiments['profiler_mode'] is deprecated. Please use the " - "non-experimental profiler_mode option directly." 
- ) - profiler_mode = profiler_mode or default_profiler_mode if ( profiler_mode == ThreadScheduler.mode @@ -283,12 +266,11 @@ def _set_initial_sampling_decision(self, sampling_context): options = client.options + sample_rate = None if callable(options.get("profiles_sampler")): sample_rate = options["profiles_sampler"](sampling_context) elif options["profiles_sample_rate"] is not None: sample_rate = options["profiles_sample_rate"] - else: - sample_rate = options["_experiments"].get("profiles_sample_rate") # The profiles_sample_rate option was not set, so profiling # was never enabled. diff --git a/sentry_sdk/tracing_utils.py b/sentry_sdk/tracing_utils.py index 11b5361de9..4f88ef26fc 100644 --- a/sentry_sdk/tracing_utils.py +++ b/sentry_sdk/tracing_utils.py @@ -93,17 +93,14 @@ def has_tracing_enabled(options): # type: (Optional[Dict[str, Any]]) -> bool """ Returns True if either traces_sample_rate or traces_sampler is - defined and enable_tracing is set and not false. + defined. """ if options is None: return False return bool( - options.get("enable_tracing") is not False - and ( - options.get("traces_sample_rate") is not None - or options.get("traces_sampler") is not None - ) + options.get("traces_sample_rate") is not None + or options.get("traces_sampler") is not None ) diff --git a/sentry_sdk/transport.py b/sentry_sdk/transport.py index 16957af743..ec0b6b2349 100644 --- a/sentry_sdk/transport.py +++ b/sentry_sdk/transport.py @@ -5,7 +5,6 @@ import socket import ssl import time -import warnings from datetime import datetime, timedelta, timezone from collections import defaultdict from urllib.request import getproxies @@ -40,7 +39,7 @@ from urllib3.poolmanager import PoolManager from urllib3.poolmanager import ProxyManager - from sentry_sdk._types import Event, EventDataCategory + from sentry_sdk._types import EventDataCategory KEEP_ALIVE_SOCKET_OPTIONS = [] for option in [ @@ -73,25 +72,6 @@ def __init__(self, options=None): else: self.parsed_dsn = None - def capture_event(self, event): - # type: (Self, Event) -> None - """ - DEPRECATED: Please use capture_envelope instead. - - This gets invoked with the event dictionary when an event should - be sent to sentry. - """ - - warnings.warn( - "capture_event is deprecated, please use capture_envelope instead!", - DeprecationWarning, - stacklevel=2, - ) - - envelope = Envelope() - envelope.add_event(event) - self.capture_envelope(envelope) - @abstractmethod def capture_envelope(self, envelope): # type: (Self, Envelope) -> None @@ -806,35 +786,6 @@ def _make_pool(self): return httpcore.ConnectionPool(**opts) -class _FunctionTransport(Transport): - """ - DEPRECATED: Users wishing to provide a custom transport should subclass - the Transport class, rather than providing a function. - """ - - def __init__( - self, func # type: Callable[[Event], None] - ): - # type: (...) -> None - Transport.__init__(self) - self._func = func - - def capture_event( - self, event # type: Event - ): - # type: (...) -> None - self._func(event) - return None - - def capture_envelope(self, envelope: Envelope) -> None: - # Since function transports expect to be called with an event, we need - # to iterate over the envelope and call the function for each event, via - # the deprecated capture_event method. 
- event = envelope.get_event() - if event is not None: - self.capture_event(event) - - def make_transport(options): # type: (Dict[str, Any]) -> Optional[Transport] ref_transport = options["transport"] @@ -850,14 +801,6 @@ def make_transport(options): return ref_transport elif isinstance(ref_transport, type) and issubclass(ref_transport, Transport): transport_cls = ref_transport - elif callable(ref_transport): - warnings.warn( - "Function transports are deprecated and will be removed in a future release." - "Please provide a Transport instance or subclass, instead.", - DeprecationWarning, - stacklevel=2, - ) - return _FunctionTransport(ref_transport) # if a transport class is given only instantiate it if the dsn is not # empty or None diff --git a/tests/conftest.py b/tests/conftest.py index b1badd18ad..3fdbab5e54 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -28,7 +28,7 @@ _installed_integrations, _processed_integrations, ) -from sentry_sdk.profiler import teardown_profiler +from sentry_sdk.profiler.transaction_profiler import teardown_profiler from sentry_sdk.profiler.continuous_profiler import teardown_continuous_profiler from sentry_sdk.transport import Transport from sentry_sdk.utils import reraise diff --git a/tests/integrations/celery/test_celery.py b/tests/integrations/celery/test_celery.py index 2f25e13a60..241c79dc9d 100644 --- a/tests/integrations/celery/test_celery.py +++ b/tests/integrations/celery/test_celery.py @@ -611,7 +611,7 @@ def example_task(): def test_messaging_destination_name_default_exchange( mock_request, routing_key, init_celery, capture_events ): - celery_app = init_celery(enable_tracing=True) + celery_app = init_celery(traces_sample_rate=1.0) events = capture_events() mock_request.delivery_info = {"routing_key": routing_key, "exchange": ""} @@ -635,7 +635,7 @@ def test_messaging_destination_name_nondefault_exchange( that the routing key is the queue name. Other exchanges may not guarantee this behavior. """ - celery_app = init_celery(enable_tracing=True) + celery_app = init_celery(traces_sample_rate=1.0) events = capture_events() mock_request.delivery_info = {"routing_key": "celery", "exchange": "custom"} @@ -650,7 +650,7 @@ def task(): ... def test_messaging_id(init_celery, capture_events): - celery = init_celery(enable_tracing=True) + celery = init_celery(traces_sample_rate=1.0) events = capture_events() @celery.task @@ -664,7 +664,7 @@ def example_task(): ... def test_retry_count_zero(init_celery, capture_events): - celery = init_celery(enable_tracing=True) + celery = init_celery(traces_sample_rate=1.0) events = capture_events() @celery.task() @@ -681,7 +681,7 @@ def task(): ... def test_retry_count_nonzero(mock_request, init_celery, capture_events): mock_request.retries = 3 - celery = init_celery(enable_tracing=True) + celery = init_celery(traces_sample_rate=1.0) events = capture_events() @celery.task() @@ -696,7 +696,7 @@ def task(): ... 
@pytest.mark.parametrize("system", ("redis", "amqp")) def test_messaging_system(system, init_celery, capture_events): - celery = init_celery(enable_tracing=True) + celery = init_celery(traces_sample_rate=1.0) events = capture_events() # Does not need to be a real URL, since we use always eager @@ -721,7 +721,7 @@ def publish(*args, **kwargs): monkeypatch.setattr(kombu.messaging.Producer, "_publish", publish) - sentry_init(integrations=[CeleryIntegration()], enable_tracing=True) + sentry_init(integrations=[CeleryIntegration()], traces_sample_rate=1.0) celery = Celery(__name__, broker=f"{system}://example.com") # noqa: E231 events = capture_events() @@ -759,7 +759,7 @@ def task(): ... def tests_span_origin_consumer(init_celery, capture_events): - celery = init_celery(enable_tracing=True) + celery = init_celery(traces_sample_rate=1.0) celery.conf.broker_url = "redis://example.com" # noqa: E231 events = capture_events() @@ -783,7 +783,7 @@ def publish(*args, **kwargs): monkeypatch.setattr(kombu.messaging.Producer, "_publish", publish) - sentry_init(integrations=[CeleryIntegration()], enable_tracing=True) + sentry_init(integrations=[CeleryIntegration()], traces_sample_rate=1.0) celery = Celery(__name__, broker="redis://example.com") # noqa: E231 events = capture_events() @@ -812,7 +812,7 @@ def test_send_task_wrapped( capture_events, reset_integrations, ): - sentry_init(integrations=[CeleryIntegration()], enable_tracing=True) + sentry_init(integrations=[CeleryIntegration()], traces_sample_rate=1.0) celery = Celery(__name__, broker="redis://example.com") # noqa: E231 events = capture_events() diff --git a/tests/integrations/celery/test_update_celery_task_headers.py b/tests/integrations/celery/test_update_celery_task_headers.py index 709e49b54a..3eb9bd7e88 100644 --- a/tests/integrations/celery/test_update_celery_task_headers.py +++ b/tests/integrations/celery/test_update_celery_task_headers.py @@ -71,7 +71,7 @@ def test_monitor_beat_tasks_with_headers(monitor_beat_tasks): def test_span_with_transaction(sentry_init): - sentry_init(enable_tracing=True) + sentry_init(traces_sample_rate=1.0) headers = {} monitor_beat_tasks = False @@ -91,7 +91,7 @@ def test_span_with_transaction(sentry_init): def test_span_with_transaction_custom_headers(sentry_init): - sentry_init(enable_tracing=True) + sentry_init(traces_sample_rate=1.0) headers = { "baggage": BAGGAGE_VALUE, "sentry-trace": SENTRY_TRACE_VALUE, @@ -190,39 +190,3 @@ def test_celery_trace_propagation_traces_sample_rate( else: assert "sentry-monitor-start-timestamp-s" not in outgoing_headers assert "sentry-monitor-start-timestamp-s" not in outgoing_headers["headers"] - - -@pytest.mark.parametrize( - "enable_tracing,monitor_beat_tasks", - list(itertools.product([None, True, False], [True, False])), -) -def test_celery_trace_propagation_enable_tracing( - sentry_init, enable_tracing, monitor_beat_tasks -): - """ - The celery integration does not check the traces_sample_rate. - By default traces_sample_rate is None which means "do not propagate traces". - But the celery integration does not check this value. 
- The Celery integration has its own mechanism to propagate traces: - https://docs.sentry.io/platforms/python/integrations/celery/#distributed-traces - """ - sentry_init(enable_tracing=enable_tracing) - - headers = {} - span = None - - scope = sentry_sdk.get_isolation_scope() - - outgoing_headers = _update_celery_task_headers(headers, span, monitor_beat_tasks) - - assert outgoing_headers["sentry-trace"] == scope.get_traceparent() - assert outgoing_headers["headers"]["sentry-trace"] == scope.get_traceparent() - assert outgoing_headers["baggage"] == scope.get_baggage().serialize() - assert outgoing_headers["headers"]["baggage"] == scope.get_baggage().serialize() - - if monitor_beat_tasks: - assert "sentry-monitor-start-timestamp-s" in outgoing_headers - assert "sentry-monitor-start-timestamp-s" in outgoing_headers["headers"] - else: - assert "sentry-monitor-start-timestamp-s" not in outgoing_headers - assert "sentry-monitor-start-timestamp-s" not in outgoing_headers["headers"] diff --git a/tests/integrations/graphene/test_graphene.py b/tests/integrations/graphene/test_graphene.py index 5d54bb49cb..63bc5de5d2 100644 --- a/tests/integrations/graphene/test_graphene.py +++ b/tests/integrations/graphene/test_graphene.py @@ -207,7 +207,7 @@ def graphql_server_sync(): def test_graphql_span_holds_query_information(sentry_init, capture_events): sentry_init( integrations=[GrapheneIntegration(), FlaskIntegration()], - enable_tracing=True, + traces_sample_rate=1.0, default_integrations=False, ) events = capture_events() diff --git a/tests/integrations/sqlalchemy/test_sqlalchemy.py b/tests/integrations/sqlalchemy/test_sqlalchemy.py index 48390b352e..999b17a19f 100644 --- a/tests/integrations/sqlalchemy/test_sqlalchemy.py +++ b/tests/integrations/sqlalchemy/test_sqlalchemy.py @@ -295,14 +295,12 @@ def test_engine_name_not_string(sentry_init): def test_query_source_disabled(sentry_init, capture_events): - sentry_options = { - "integrations": [SqlalchemyIntegration()], - "enable_tracing": True, - "enable_db_query_source": False, - "db_query_source_threshold_ms": 0, - } - - sentry_init(**sentry_options) + sentry_init( + integrations=[SqlalchemyIntegration()], + traces_sample_rate=1.0, + enable_db_query_source=False, + db_query_source_threshold_ms=0, + ) events = capture_events() @@ -348,7 +346,7 @@ class Person(Base): def test_query_source_enabled(sentry_init, capture_events, enable_db_query_source): sentry_options = { "integrations": [SqlalchemyIntegration()], - "enable_tracing": True, + "traces_sample_rate": 1.0, "db_query_source_threshold_ms": 0, } if enable_db_query_source is not None: @@ -399,7 +397,7 @@ class Person(Base): def test_query_source(sentry_init, capture_events): sentry_init( integrations=[SqlalchemyIntegration()], - enable_tracing=True, + traces_sample_rate=1.0, enable_db_query_source=True, db_query_source_threshold_ms=0, ) @@ -464,7 +462,7 @@ def test_query_source_with_module_in_search_path(sentry_init, capture_events): """ sentry_init( integrations=[SqlalchemyIntegration()], - enable_tracing=True, + traces_sample_rate=1.0, enable_db_query_source=True, db_query_source_threshold_ms=0, ) @@ -527,7 +525,7 @@ class Person(Base): def test_no_query_source_if_duration_too_short(sentry_init, capture_events): sentry_init( integrations=[SqlalchemyIntegration()], - enable_tracing=True, + traces_sample_rate=1.0, enable_db_query_source=True, db_query_source_threshold_ms=100, ) @@ -595,7 +593,7 @@ def __exit__(self, type, value, traceback): def test_query_source_if_duration_over_threshold(sentry_init, 
capture_events): sentry_init( integrations=[SqlalchemyIntegration()], - enable_tracing=True, + traces_sample_rate=1.0, enable_db_query_source=True, db_query_source_threshold_ms=100, ) diff --git a/tests/profiler/test_transaction_profiler.py b/tests/profiler/test_transaction_profiler.py index 7679831be3..97836d59d9 100644 --- a/tests/profiler/test_transaction_profiler.py +++ b/tests/profiler/test_transaction_profiler.py @@ -39,30 +39,13 @@ def process_test_sample(sample): return [(tid, (stack, stack)) for tid, stack in sample] -def non_experimental_options(mode=None, sample_rate=None): - return {"profiler_mode": mode, "profiles_sample_rate": sample_rate} - - -def experimental_options(mode=None, sample_rate=None): - return { - "_experiments": {"profiler_mode": mode, "profiles_sample_rate": sample_rate} - } - - @pytest.mark.parametrize( "mode", [pytest.param("foo")], ) -@pytest.mark.parametrize( - "make_options", - [ - pytest.param(experimental_options, id="experiment"), - pytest.param(non_experimental_options, id="non experimental"), - ], -) -def test_profiler_invalid_mode(mode, make_options, teardown_profiling): +def test_profiler_invalid_mode(mode, teardown_profiling): with pytest.raises(ValueError): - setup_profiler(make_options(mode)) + setup_profiler({"profiler_mode": mode}) @pytest.mark.parametrize( @@ -73,30 +56,16 @@ def test_profiler_invalid_mode(mode, make_options, teardown_profiling): pytest.param("gevent", marks=requires_gevent), ], ) -@pytest.mark.parametrize( - "make_options", - [ - pytest.param(experimental_options, id="experiment"), - pytest.param(non_experimental_options, id="non experimental"), - ], -) -def test_profiler_valid_mode(mode, make_options, teardown_profiling): +def test_profiler_valid_mode(mode, teardown_profiling): # should not raise any exceptions - setup_profiler(make_options(mode)) + setup_profiler({"profiler_mode": mode}) -@pytest.mark.parametrize( - "make_options", - [ - pytest.param(experimental_options, id="experiment"), - pytest.param(non_experimental_options, id="non experimental"), - ], -) -def test_profiler_setup_twice(make_options, teardown_profiling): +def test_profiler_setup_twice(teardown_profiling): # setting up the first time should return True to indicate success - assert setup_profiler(make_options()) + assert setup_profiler({}) # setting up the second time should return False to indicate no-op - assert not setup_profiler(make_options()) + assert not setup_profiler({}) @pytest.mark.parametrize( @@ -116,13 +85,6 @@ def test_profiler_setup_twice(make_options, teardown_profiling): pytest.param(None, 0, id="profiler not enabled"), ], ) -@pytest.mark.parametrize( - "make_options", - [ - pytest.param(experimental_options, id="experiment"), - pytest.param(non_experimental_options, id="non experimental"), - ], -) @mock.patch("sentry_sdk.profiler.transaction_profiler.PROFILE_MINIMUM_SAMPLES", 0) def test_profiles_sample_rate( sentry_init, @@ -131,15 +93,12 @@ def test_profiles_sample_rate( teardown_profiling, profiles_sample_rate, profile_count, - make_options, mode, ): - options = make_options(mode=mode, sample_rate=profiles_sample_rate) sentry_init( traces_sample_rate=1.0, - profiler_mode=options.get("profiler_mode"), - profiles_sample_rate=options.get("profiles_sample_rate"), - _experiments=options.get("_experiments", {}), + profiler_mode=mode, + profiles_sample_rate=profiles_sample_rate, ) envelopes = capture_envelopes() @@ -211,6 +170,7 @@ def test_profiles_sampler( sentry_init( traces_sample_rate=1.0, profiles_sampler=profiles_sampler, + 
profiler_mode=mode, ) envelopes = capture_envelopes() @@ -243,7 +203,7 @@ def test_minimum_unique_samples_required( ): sentry_init( traces_sample_rate=1.0, - _experiments={"profiles_sample_rate": 1.0}, + profiles_sample_rate=1.0, ) envelopes = capture_envelopes() @@ -272,7 +232,7 @@ def test_profile_captured( ): sentry_init( traces_sample_rate=1.0, - _experiments={"profiles_sample_rate": 1.0}, + profiles_sample_rate=1.0, ) envelopes = capture_envelopes() diff --git a/tests/test_basics.py b/tests/test_basics.py index 75d9fcd0bc..272f8e9fd9 100644 --- a/tests/test_basics.py +++ b/tests/test_basics.py @@ -33,7 +33,6 @@ from sentry_sdk.integrations.stdlib import StdlibIntegration from sentry_sdk.scope import add_global_event_processor from sentry_sdk.utils import datetime_from_isoformat, get_sdk_name, reraise -from sentry_sdk.tracing_utils import has_tracing_enabled class NoOpIntegration(Integration): @@ -249,32 +248,6 @@ def do_this(): assert crumb["type"] == "default" -@pytest.mark.parametrize( - "enable_tracing, traces_sample_rate, tracing_enabled, updated_traces_sample_rate", - [ - (None, None, False, None), - (False, 0.0, False, 0.0), - (False, 1.0, False, 1.0), - (None, 1.0, True, 1.0), - (True, 1.0, True, 1.0), - (None, 0.0, True, 0.0), # We use this as - it's configured but turned off - (True, 0.0, True, 0.0), # We use this as - it's configured but turned off - (True, None, True, 1.0), - ], -) -def test_option_enable_tracing( - sentry_init, - enable_tracing, - traces_sample_rate, - tracing_enabled, - updated_traces_sample_rate, -): - sentry_init(enable_tracing=enable_tracing, traces_sample_rate=traces_sample_rate) - options = sentry_sdk.get_client().options - assert has_tracing_enabled(options) is tracing_enabled - assert options["traces_sample_rate"] == updated_traces_sample_rate - - def test_breadcrumb_arguments(sentry_init, capture_events): assert_hint = {"bar": 42} @@ -839,7 +812,7 @@ def test_classmethod_tracing(sentry_init): def test_last_event_id(sentry_init): - sentry_init(enable_tracing=True) + sentry_init(traces_sample_rate=1.0) assert last_event_id() is None @@ -849,7 +822,7 @@ def test_last_event_id(sentry_init): def test_last_event_id_transaction(sentry_init): - sentry_init(enable_tracing=True) + sentry_init(traces_sample_rate=1.0) assert last_event_id() is None @@ -860,7 +833,7 @@ def test_last_event_id_transaction(sentry_init): def test_last_event_id_scope(sentry_init): - sentry_init(enable_tracing=True) + sentry_init(traces_sample_rate=1.0) # Should not crash with isolation_scope() as scope: diff --git a/tests/test_client.py b/tests/test_client.py index 3c370f79af..fb547e495a 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -1440,9 +1440,3 @@ def run(self, sentry_init, capture_record_lost_event_calls): ) def test_dropped_transaction(sentry_init, capture_record_lost_event_calls, test_config): test_config.run(sentry_init, capture_record_lost_event_calls) - - -@pytest.mark.parametrize("enable_tracing", [True, False]) -def test_enable_tracing_deprecated(sentry_init, enable_tracing): - with pytest.warns(DeprecationWarning): - sentry_init(enable_tracing=enable_tracing) diff --git a/tests/test_scope.py b/tests/test_scope.py index 1ae1a2fd35..98b9320944 100644 --- a/tests/test_scope.py +++ b/tests/test_scope.py @@ -884,7 +884,7 @@ def test_set_tags(): def test_last_event_id(sentry_init): - sentry_init(enable_tracing=True) + sentry_init(traces_sample_rate=1.0) assert Scope.last_event_id() is None @@ -894,7 +894,7 @@ def test_last_event_id(sentry_init): def 
test_last_event_id_transaction(sentry_init): - sentry_init(enable_tracing=True) + sentry_init(traces_sample_rate=1.0) assert Scope.last_event_id() is None @@ -905,7 +905,7 @@ def test_last_event_id_transaction(sentry_init): def test_last_event_id_cleared(sentry_init): - sentry_init(enable_tracing=True) + sentry_init(traces_sample_rate=1.0) # Make sure last_event_id is set sentry_sdk.capture_exception(Exception("test")) From cec2cd2103a57648e705d82812545de12876a29e Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Fri, 21 Mar 2025 09:07:52 +0100 Subject: [PATCH 195/244] fix(tracing): Fix `InvalidOperation` (#4179) `InvalidOperation` can occur when using tracing if the `Decimal` class's global context has been modified to set the precision below 6. This change fixes this bug by setting a custom context for our `quantize` call. Fixes #4177 --- sentry_sdk/tracing_utils.py | 9 +++++---- tests/tracing/test_sample_rand.py | 26 ++++++++++++++++++++++++++ 2 files changed, 31 insertions(+), 4 deletions(-) diff --git a/sentry_sdk/tracing_utils.py b/sentry_sdk/tracing_utils.py index 4f88ef26fc..07f8373c68 100644 --- a/sentry_sdk/tracing_utils.py +++ b/sentry_sdk/tracing_utils.py @@ -6,6 +6,7 @@ import uuid from collections.abc import Mapping from datetime import datetime, timedelta, timezone +from decimal import ROUND_DOWN, Context, Decimal from functools import wraps from random import Random from urllib.parse import quote, unquote @@ -699,8 +700,6 @@ def _generate_sample_rand( The pseudorandom number generator is seeded with the trace ID. """ - import decimal - lower, upper = interval if not lower < upper: # using `if lower >= upper` would handle NaNs incorrectly raise ValueError("Invalid interval: lower must be less than upper") @@ -711,8 +710,10 @@ def _generate_sample_rand( sample_rand = rng.uniform(lower, upper) # Round down to exactly six decimal-digit precision. - return decimal.Decimal(sample_rand).quantize( - decimal.Decimal("0.000001"), rounding=decimal.ROUND_DOWN + # Setting the context is needed to avoid an InvalidOperation exception + # in case the user has changed the default precision. + return Decimal(sample_rand).quantize( + Decimal("0.000001"), rounding=ROUND_DOWN, context=Context(prec=6) ) diff --git a/tests/tracing/test_sample_rand.py b/tests/tracing/test_sample_rand.py index 8549921227..fc7d0e2404 100644 --- a/tests/tracing/test_sample_rand.py +++ b/tests/tracing/test_sample_rand.py @@ -1,3 +1,4 @@ +import decimal from unittest import mock import pytest @@ -53,3 +54,28 @@ def test_transaction_uses_incoming_sample_rand( # Transaction event captured if sample_rand < sample_rate, indicating that # sample_rand is used to make the sampling decision. assert len(events) == int(sample_rand < sample_rate) + + +def test_decimal_context(sentry_init, capture_events): + """ + Ensure that having a decimal context with a precision below 6 + does not cause an InvalidOperation exception. 
+ """ + sentry_init(traces_sample_rate=1.0) + events = capture_events() + + old_prec = decimal.getcontext().prec + decimal.getcontext().prec = 2 + + try: + with mock.patch( + "sentry_sdk.tracing_utils.Random.uniform", return_value=0.123456789 + ): + with sentry_sdk.start_transaction() as transaction: + assert ( + transaction.get_baggage().sentry_items["sample_rand"] == "0.123456" + ) + finally: + decimal.getcontext().prec = old_prec + + assert len(events) == 1 From 9a2537752ddcd25ff23d24bb6b2617fe17eafd22 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Fri, 21 Mar 2025 09:39:49 +0100 Subject: [PATCH 196/244] Use `warnings` module for deprecation messagse. (#4180) Fixes #4109 --- sentry_sdk/consts.py | 2 +- sentry_sdk/integrations/opentelemetry/scope.py | 7 +++++-- sentry_sdk/scope.py | 18 ++++++++++++------ sentry_sdk/tracing.py | 7 +++++-- 4 files changed, 23 insertions(+), 11 deletions(-) diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index 7841c4d9b4..c5fa461c15 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -160,7 +160,7 @@ class SPANDATA: AI_TOOL_CALLS = "ai.tool_calls" """ - For an AI model call, the function that was called. This is deprecated for OpenAI, and replaced by tool_calls + For an AI model call, the function that was called. """ AI_TOOLS = "ai.tools" diff --git a/sentry_sdk/integrations/opentelemetry/scope.py b/sentry_sdk/integrations/opentelemetry/scope.py index c04c299e38..56dd129a7f 100644 --- a/sentry_sdk/integrations/opentelemetry/scope.py +++ b/sentry_sdk/integrations/opentelemetry/scope.py @@ -1,5 +1,6 @@ from typing import cast from contextlib import contextmanager +import warnings from opentelemetry.context import ( get_value, @@ -142,8 +143,10 @@ def start_transaction(self, **kwargs): This function is deprecated and will be removed in a future release. Use :py:meth:`sentry_sdk.start_span` instead. """ - logger.warning( - "The `start_transaction` method is deprecated, please use `sentry_sdk.start_span instead.`" + warnings.warn( + "The `start_transaction` method is deprecated, please use `sentry_sdk.start_span instead.`", + DeprecationWarning, + stacklevel=2, ) return self.start_span(**kwargs) diff --git a/sentry_sdk/scope.py b/sentry_sdk/scope.py index 51f95cdeae..1784b6c5b3 100644 --- a/sentry_sdk/scope.py +++ b/sentry_sdk/scope.py @@ -563,8 +563,10 @@ def trace_propagation_meta(self, *args, **kwargs): """ span = kwargs.pop("span", None) if span is not None: - logger.warning( - "The parameter `span` in trace_propagation_meta() is deprecated and will be removed in the future." + warnings.warn( + "The parameter `span` in trace_propagation_meta() is deprecated and will be removed in the future.", + DeprecationWarning, + stacklevel=2, ) meta = "" @@ -735,8 +737,10 @@ def transaction(self, value): # transaction name or transaction (self._span) depending on the type of # the value argument. - logger.warning( - "Assigning to scope.transaction directly is deprecated: use scope.set_transaction_name() instead." + warnings.warn( + "Assigning to scope.transaction directly is deprecated: use scope.set_transaction_name() instead.", + DeprecationWarning, + stacklevel=2, ) self._transaction = value if self._span and self._span.containing_transaction: @@ -954,8 +958,10 @@ def start_transaction(self, **kwargs): This function is deprecated and will be removed in a future release. Use :py:meth:`sentry_sdk.start_span` instead. 
""" - logger.warning( - "The `start_transaction` method is deprecated, please use `sentry_sdk.start_span instead.`" + warnings.warn( + "The `start_transaction` method is deprecated, please use `sentry_sdk.start_span instead.`", + DeprecationWarning, + stacklevel=2, ) return NoOpSpan(**kwargs) diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 359014d961..0e31ad4ff5 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -1,6 +1,7 @@ from datetime import datetime from enum import Enum import json +import warnings from opentelemetry import trace as otel_trace, context from opentelemetry.trace import ( @@ -476,8 +477,10 @@ def containing_transaction(self): .. deprecated:: 3.0.0 This will be removed in the future. Use :func:`root_span` instead. """ - logger.warning( - "Deprecated: This will be removed in the future. Use root_span instead." + warnings.warn( + "Deprecated: This will be removed in the future. Use root_span instead.", + DeprecationWarning, + stacklevel=2, ) return self.root_span From 0a2d87808b809220c9388d037267d9c792d62528 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Wed, 26 Mar 2025 15:33:56 +0100 Subject: [PATCH 197/244] Fix AWS Lambda tests (#4199) With the refactoring of the AWS Lambda test suite in `master`. We need to update the tests in `potel-base` to succeed again. This also fixes a check in the `stdlib` integration. With local AWS Lambda we have a DSN that includes a port. This did not work and this PR makes this work. (This will allow support for self hosted Sentry running on a specific port too) --- sentry_sdk/integrations/stdlib.py | 2 +- .../TracesSampler/index.py | 16 +----- .../aws_lambda/test_aws_lambda.py | 53 ++++++++++++++----- tests/integrations/aws_lambda/utils.py | 6 +-- 4 files changed, 47 insertions(+), 30 deletions(-) diff --git a/sentry_sdk/integrations/stdlib.py b/sentry_sdk/integrations/stdlib.py index a6db07f48f..0812d31c67 100644 --- a/sentry_sdk/integrations/stdlib.py +++ b/sentry_sdk/integrations/stdlib.py @@ -73,7 +73,7 @@ def putrequest(self, method, url, *args, **kwargs): client = sentry_sdk.get_client() if client.get_integration(StdlibIntegration) is None or is_sentry_url( - client, host + client, f"{host}:{port}" ): return real_putrequest(self, method, url, *args, **kwargs) diff --git a/tests/integrations/aws_lambda/lambda_functions_with_embedded_sdk/TracesSampler/index.py b/tests/integrations/aws_lambda/lambda_functions_with_embedded_sdk/TracesSampler/index.py index ce797faf71..bc2693d9b5 100644 --- a/tests/integrations/aws_lambda/lambda_functions_with_embedded_sdk/TracesSampler/index.py +++ b/tests/integrations/aws_lambda/lambda_functions_with_embedded_sdk/TracesSampler/index.py @@ -4,26 +4,14 @@ from sentry_sdk.integrations.aws_lambda import AwsLambdaIntegration # Global variables to store sampling context for verification -sampling_context_data = { - "aws_event_present": False, - "aws_context_present": False, - "event_data": None, -} +sampling_context_data = None def trace_sampler(sampling_context): # Store the sampling context for verification global sampling_context_data + sampling_context_data = sampling_context - # Check if aws_event and aws_context are in the sampling_context - if "aws_event" in sampling_context: - sampling_context_data["aws_event_present"] = True - sampling_context_data["event_data"] = sampling_context["aws_event"] - - if "aws_context" in sampling_context: - sampling_context_data["aws_context_present"] = True - - print("Sampling context data:", sampling_context_data) return 1.0 # Always 
sample diff --git a/tests/integrations/aws_lambda/test_aws_lambda.py b/tests/integrations/aws_lambda/test_aws_lambda.py index 85da7e0b14..5f608fcc5a 100644 --- a/tests/integrations/aws_lambda/test_aws_lambda.py +++ b/tests/integrations/aws_lambda/test_aws_lambda.py @@ -67,7 +67,7 @@ def test_environment(): try: # Wait for SAM to be ready - LocalLambdaStack.wait_for_stack() + LocalLambdaStack.wait_for_stack(log_file=debug_log_file) def before_test(): server.clear_envelopes() @@ -137,12 +137,12 @@ def test_basic_no_exception(lambda_client, test_environment): } assert transaction_event["contexts"]["trace"] == { "op": "function.aws", - "description": mock.ANY, "span_id": mock.ANY, "parent_span_id": mock.ANY, "trace_id": mock.ANY, "origin": "auto.function.aws_lambda", "data": mock.ANY, + "status": "ok", } @@ -178,7 +178,6 @@ def test_basic_exception(lambda_client, test_environment): } assert error_event["contexts"]["trace"] == { "op": "function.aws", - "description": mock.ANY, "span_id": mock.ANY, "parent_span_id": mock.ANY, "trace_id": mock.ANY, @@ -314,9 +313,7 @@ def test_non_dict_event( "headers": {"Host": "x1.io", "X-Forwarded-Proto": "https"}, "method": "GET", "url": "https://x1.io/1", - "query_string": { - "done": "f", - }, + "query_string": "done=f", } else: request_data = {"url": "awslambda:///BasicException"} @@ -343,7 +340,8 @@ def test_request_data(lambda_client, test_environment): "X-Forwarded-Proto": "https" }, "queryStringParameters": { - "bonkers": "true" + "bonkers": "true", + "wild": "false" }, "pathParameters": null, "stageVariables": null, @@ -373,7 +371,7 @@ def test_request_data(lambda_client, test_environment): "X-Forwarded-Proto": "https", }, "method": "GET", - "query_string": {"bonkers": "true"}, + "query_string": "bonkers=true&wild=false", "url": "https://iwsz2c7uwi.execute-api.us-east-1.amazonaws.com/asd", } @@ -457,7 +455,19 @@ def test_traces_sampler_has_correct_sampling_context(lambda_client, test_environ Test that aws_event and aws_context are passed in the custom_sampling_context when using the AWS Lambda integration. 
""" - test_payload = {"test_key": "test_value"} + test_payload = { + "test_key": "test_value", + "httpMethod": "GET", + "queryStringParameters": { + "test_query_param": "test_query_value", + }, + "path": "/test", + "headers": { + "X-Forwarded-Proto": "https", + "Host": "example.com", + "X-Bla": "blabla", + }, + } response = lambda_client.invoke( FunctionName="TracesSampler", Payload=json.dumps(test_payload), @@ -466,9 +476,28 @@ def test_traces_sampler_has_correct_sampling_context(lambda_client, test_environ sampling_context_data = json.loads(response_payload["body"])[ "sampling_context_data" ] - assert sampling_context_data.get("aws_event_present") is True - assert sampling_context_data.get("aws_context_present") is True - assert sampling_context_data.get("event_data", {}).get("test_key") == "test_value" + + assert sampling_context_data == { + "transaction_context": { + "name": "TracesSampler", + "op": "function.aws", + "source": "component", + }, + "http.request.method": "GET", + "url.query": "test_query_param=test_query_value", + "url.path": "/test", + "url.full": "https://example.com/test?test_query_param=test_query_value", + "network.protocol.name": "https", + "server.address": "example.com", + "faas.name": "TracesSampler", + "http.request.header.x-forwarded-proto": "https", + "http.request.header.host": "example.com", + "http.request.header.x-bla": "blabla", + "sentry.op": "function.aws", + "sentry.source": "component", + "parent_sampled": None, + "cloud.provider": "aws", + } @pytest.mark.parametrize( diff --git a/tests/integrations/aws_lambda/utils.py b/tests/integrations/aws_lambda/utils.py index d20c9352e7..3d590390ae 100644 --- a/tests/integrations/aws_lambda/utils.py +++ b/tests/integrations/aws_lambda/utils.py @@ -211,7 +211,7 @@ def __init__(self, scope: Construct, construct_id: str, **kwargs) -> None: ) @classmethod - def wait_for_stack(cls, timeout=60, port=SAM_PORT): + def wait_for_stack(cls, timeout=60, port=SAM_PORT, log_file=None): """ Wait for SAM to be ready, with timeout. """ @@ -219,8 +219,8 @@ def wait_for_stack(cls, timeout=60, port=SAM_PORT): while True: if time.time() - start_time > timeout: raise TimeoutError( - "AWS SAM failed to start within %s seconds. (Maybe Docker is not running?)" - % timeout + "AWS SAM failed to start within %s seconds. (Maybe Docker is not running, or new docker images could not be built in time?) Check the log for more details: %s" + % (timeout, log_file) ) try: From 8128e6eb3c6bbb7069aec84644d71b1c698ecb9d Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Thu, 27 Mar 2025 11:44:35 +0100 Subject: [PATCH 198/244] Fix trytond tests on potel-base (#4205) Fixing this: ``` ==================================== ERRORS ==================================== _________ ERROR collecting tests/integrations/trytond/test_trytond.py __________ ImportError while importing test module '/home/runner/work/sentry-python/sentry-python/tests/integrations/trytond/test_trytond.py'. Hint: make sure your test modules/packages have valid Python names. Traceback: .tox/py3.7-trytond-v5.0.9/lib/python3.7/site-packages/_pytest/python.py:617: in _importtestmodule mod = import_path(self.path, mode=importmode, root=self.config.rootpath) .tox/py3.7-trytond-v5.0.9/lib/python3.7/site-packages/_pytest/pathlib.py:567: in import_path importlib.import_module(module_name) /opt/hostedtoolcache/Python/3.7.17/x64/lib/python3.7/importlib/__init__.py:127: in import_module return _bootstrap._gcd_import(name[level:], package, level) :1006: in _gcd_import ??? :983: in _find_and_load ??? 
:967: in _find_and_load_unlocked ??? :677: in _load_unlocked ??? .tox/py3.7-trytond-v5.0.9/lib/python3.7/site-packages/_pytest/assertion/rewrite.py:186: in exec_module exec(co, module.__dict__) tests/integrations/trytond/test_trytond.py:11: in from trytond.wsgi import app as trytond_app .tox/py3.7-trytond-v5.0.9/lib/python3.7/site-packages/trytond/wsgi.py:12: in from werkzeug.contrib.fixers import ProxyFix E ModuleNotFoundError: No module named 'werkzeug.contrib' ``` --- scripts/populate_tox/config.py | 2 +- tox.ini | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/scripts/populate_tox/config.py b/scripts/populate_tox/config.py index b0b1a410da..a233886c23 100644 --- a/scripts/populate_tox/config.py +++ b/scripts/populate_tox/config.py @@ -164,7 +164,7 @@ "package": "trytond", "deps": { "*": ["werkzeug"], - "<=5.0": ["werkzeug<1.0"], + "<5.1": ["werkzeug<1.0"], }, }, "typer": { diff --git a/tox.ini b/tox.ini index 1196ecb155..643fa19759 100644 --- a/tox.ini +++ b/tox.ini @@ -10,7 +10,7 @@ # The file (and all resulting CI YAMLs) then need to be regenerated via # "scripts/generate-test-files.sh". # -# Last generated: 2025-03-27T09:29:30.733135+00:00 +# Last generated: 2025-03-27T10:11:24.425788+00:00 [tox] requires = @@ -718,6 +718,7 @@ deps = trytond-v7.0.9: trytond==7.0.9 trytond-v7.4.8: trytond==7.4.8 trytond: werkzeug + trytond-v5.0.9: werkzeug<1.0 typer-v0.15.2: typer==0.15.2 From aaebe61aeb5237f0d408b52124fcb37cd29dc7b2 Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Thu, 27 Mar 2025 11:49:27 +0100 Subject: [PATCH 199/244] Remove old AWS workflow file This is now part of the Cloud test group --- .github/workflows/test-integrations-aws.yml | 116 -------------------- 1 file changed, 116 deletions(-) delete mode 100644 .github/workflows/test-integrations-aws.yml diff --git a/.github/workflows/test-integrations-aws.yml b/.github/workflows/test-integrations-aws.yml deleted file mode 100644 index 0088aa4174..0000000000 --- a/.github/workflows/test-integrations-aws.yml +++ /dev/null @@ -1,116 +0,0 @@ -# Do not edit this YAML file. This file is generated automatically by executing -# python scripts/split_tox_gh_actions/split_tox_gh_actions.py -# The template responsible for it is in -# scripts/split_tox_gh_actions/templates/base.jinja -name: Test AWS -on: - push: - branches: - - master - - release/** - - potel-base - # XXX: We are using `pull_request_target` instead of `pull_request` because we want - # this to run on forks with access to the secrets necessary to run the test suite. - # Prefer to use `pull_request` when possible. - pull_request_target: - types: [labeled, opened, reopened, synchronize] -# Cancel in progress workflows on pull_requests. 
-# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value -concurrency: - group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} - cancel-in-progress: true -permissions: - contents: read - # `write` is needed to remove the `Trigger: tests using secrets` label - pull-requests: write -env: - SENTRY_PYTHON_TEST_AWS_ACCESS_KEY_ID: ${{ secrets.SENTRY_PYTHON_TEST_AWS_ACCESS_KEY_ID }} - SENTRY_PYTHON_TEST_AWS_SECRET_ACCESS_KEY: ${{ secrets.SENTRY_PYTHON_TEST_AWS_SECRET_ACCESS_KEY }} - BUILD_CACHE_KEY: ${{ github.sha }} - CACHED_BUILD_PATHS: | - ${{ github.workspace }}/dist-serverless -jobs: - check-permissions: - name: permissions check - runs-on: ubuntu-20.04 - steps: - - uses: actions/checkout@v4.2.2 - with: - persist-credentials: false - - name: Check permissions on PR - if: github.event_name == 'pull_request_target' - run: | - python3 -uS .github/workflows/scripts/trigger_tests_on_label.py \ - --repo-id ${{ github.event.repository.id }} \ - --pr ${{ github.event.number }} \ - --event ${{ github.event.action }} \ - --username "$ARG_USERNAME" \ - --label-names "$ARG_LABEL_NAMES" - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - # these can contain special characters - ARG_USERNAME: ${{ github.event.pull_request.user.login }} - ARG_LABEL_NAMES: ${{ toJSON(github.event.pull_request.labels.*.name) }} - - name: Check permissions on repo branch - if: github.event_name == 'push' - run: true - test-aws-pinned: - name: AWS (pinned) - timeout-minutes: 30 - runs-on: ${{ matrix.os }} - strategy: - fail-fast: false - matrix: - python-version: ["3.9"] - os: [ubuntu-22.04] - needs: check-permissions - steps: - - uses: actions/checkout@v4.2.2 - with: - ref: ${{ github.event.pull_request.head.sha || github.ref }} - - uses: actions/setup-python@v5 - with: - python-version: ${{ matrix.python-version }} - allow-prereleases: true - - name: Setup Test Env - run: | - pip install "coverage[toml]" tox - - name: Erase coverage - run: | - coverage erase - - name: Test aws_lambda pinned - run: | - set -x # print commands that are executed - ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-aws_lambda" - - name: Generate coverage XML - if: ${{ !cancelled() }} - run: | - coverage combine .coverage-sentry-* - coverage xml - - name: Upload coverage to Codecov - if: ${{ !cancelled() }} - uses: codecov/codecov-action@v5.3.1 - with: - token: ${{ secrets.CODECOV_TOKEN }} - files: coverage.xml - # make sure no plugins alter our coverage reports - plugin: noop - verbose: true - - name: Upload test results to Codecov - if: ${{ !cancelled() }} - uses: codecov/test-results-action@v1 - with: - token: ${{ secrets.CODECOV_TOKEN }} - files: .junitxml - verbose: true - check_required_tests: - name: All pinned AWS tests passed - needs: test-aws-pinned - # Always run this, even if a dependent job failed - if: always() - runs-on: ubuntu-20.04 - steps: - - name: Check for failures - if: contains(needs.test-aws-pinned.result, 'failure') || contains(needs.test-aws-pinned.result, 'skipped') - run: | - echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1 From 29838546b953172925456fdcead29b75d2ec819c Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Thu, 27 Mar 2025 13:57:06 +0100 Subject: [PATCH 200/244] Drop `propagate_traces` (#4206) Drop the deprecated `propagate_traces` `init` option in POTel. Important: The Celery integration also has an option called `propagate_traces` -- this should remain unchanged. 
--------- Co-authored-by: nellaG --- MIGRATION_GUIDE.md | 1 + sentry_sdk/consts.py | 3 --- sentry_sdk/scope.py | 7 ------- tests/tracing/test_integration_tests.py | 14 -------------- 4 files changed, 1 insertion(+), 24 deletions(-) diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index d57696d910..3d807d795a 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -134,6 +134,7 @@ Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of wh - Spans no longer have a `description`. Use `name` instead. - Dropped support for Python 3.6. - The `enable_tracing` `init` option has been removed. Configure `traces_sample_rate` directly. +- The `propagate_traces` `init` option has been removed. Use `trace_propagation_targets` instead. - The `custom_sampling_context` parameter of `start_transaction` has been removed. Use `attributes` instead to set key-value pairs of data that should be accessible in the traces sampler. Note that span attributes need to conform to the [OpenTelemetry specification](https://opentelemetry.io/docs/concepts/signals/traces/#attributes), meaning only certain types can be set as values. - The PyMongo integration no longer sets tags. The data is still accessible via span attributes. - The PyMongo integration doesn't set `operation_ids` anymore. The individual IDs (`operation_id`, `request_id`, `session_id`) are now accessible as separate span attributes. diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index 74d18ce80b..1fc920ac52 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -511,7 +511,6 @@ def __init__( debug=None, # type: Optional[bool] attach_stacktrace=False, # type: bool ca_certs=None, # type: Optional[str] - propagate_traces=True, # type: bool traces_sample_rate=None, # type: Optional[float] traces_sampler=None, # type: Optional[TracesSampler] profiles_sample_rate=None, # type: Optional[float] @@ -915,8 +914,6 @@ def __init__( :param profile_session_sample_rate: - :param propagate_traces: - :param auto_session_tracking: :param spotlight: diff --git a/sentry_sdk/scope.py b/sentry_sdk/scope.py index bf4a62ab01..43cb94a32d 100644 --- a/sentry_sdk/scope.py +++ b/sentry_sdk/scope.py @@ -610,13 +610,6 @@ def iter_trace_propagation_headers(self, *args, **kwargs): If no span is given, the trace data is taken from the scope. """ client = self.get_client() - if not client.options.get("propagate_traces"): - warnings.warn( - "The `propagate_traces` parameter is deprecated. 
Please use `trace_propagation_targets` instead.", - DeprecationWarning, - stacklevel=2, - ) - return span = kwargs.pop("span", None) span = span or self.span diff --git a/tests/tracing/test_integration_tests.py b/tests/tracing/test_integration_tests.py index 6fa9d66964..e6e436dbd2 100644 --- a/tests/tracing/test_integration_tests.py +++ b/tests/tracing/test_integration_tests.py @@ -117,20 +117,6 @@ def test_continue_trace(sentry_init, capture_envelopes, sample_rate): # noqa:N8 assert message_payload["message"] == "hello" -@pytest.mark.parametrize("sample_rate", [0.0, 1.0]) -def test_propagate_traces_deprecation_warning(sentry_init, sample_rate): - sentry_init(traces_sample_rate=sample_rate, propagate_traces=False) - - with start_span(name="hi"): - with start_span() as old_span: - with pytest.warns(DeprecationWarning): - dict( - sentry_sdk.get_current_scope().iter_trace_propagation_headers( - old_span - ) - ) - - @pytest.mark.parametrize("sample_rate", [0.5, 1.0]) def test_dynamic_sampling_head_sdk_creates_dsc( sentry_init, From fc8fa9f6184638373dc57ab21002a2cac1d851c9 Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Thu, 27 Mar 2025 15:58:42 +0100 Subject: [PATCH 201/244] feat(tracing): Port `sample_rand` to POTel (#4106) Port `sample_rand` to `potel-base`. See [spec](https://develop.sentry.dev/sdk/telemetry/traces/#propagated-random-value). There are now two places where a `sample_rand` might be generated: - If we're explicitly propagating with `continue_trace`, we'll [backfill](https://github.com/getsentry/sentry-python/pull/4106/files#diff-7c64294459f5053c93d44e0e33e4e73ffcef0adefcd77ba91f4031aa461a8c42R396-R397) `sample_rand` on the propagation context like on master, either using the incoming one or generating a new one from the incoming `sampled`/`sample_rate`. - Otherwise, we generate a new `sample_rand` [in the Sampler](https://github.com/getsentry/sentry-python/pull/4106/files#diff-59aa7195d955e153b5cdd730f888994996a72eaf5e9ea174335ce961841584a9R194-R213). The generated `sample_rand` is then saved on the trace state. This change fixes most of the failures in the Common test suite. Closes https://github.com/getsentry/sentry-python/issues/4027 --------- Co-authored-by: Daniel Szoke <7881302+szokeasaurusrex@users.noreply.github.com> --- MIGRATION_GUIDE.md | 1 + .../integrations/opentelemetry/consts.py | 3 + .../integrations/opentelemetry/sampler.py | 133 +++++++++++++----- .../integrations/opentelemetry/scope.py | 1 - sentry_sdk/integrations/stdlib.py | 2 +- sentry_sdk/tracing.py | 1 - sentry_sdk/tracing_utils.py | 84 ++++++++++- .../opentelemetry/test_propagator.py | 70 +++++++-- .../opentelemetry/test_sampler.py | 8 +- tests/integrations/stdlib/test_httplib.py | 7 +- tests/test_api.py | 12 +- tests/test_dsc.py | 42 +++--- tests/test_propagationcontext.py | 6 +- tests/tracing/test_integration_tests.py | 2 +- tests/tracing/test_sample_rand.py | 110 +++++++++++++-- tests/tracing/test_sample_rand_propagation.py | 23 +-- tests/tracing/test_sampling.py | 24 +++- 17 files changed, 408 insertions(+), 121 deletions(-) diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index 3d807d795a..cc35f9134b 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -157,6 +157,7 @@ Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of wh - `profiles_sample_rate` and `profiler_mode` were removed from options available via `_experiments`. Use the top-level `profiles_sample_rate` and `profiler_mode` options instead. - `Transport.capture_event` has been removed. 
Use `Transport.capture_envelope` instead. - Function transports are no longer supported. Subclass the `Transport` instead. +- `start_transaction` (`start_span`) no longer takes a `baggage` argument. Use the `continue_trace()` context manager instead to propagate baggage. ### Deprecated diff --git a/sentry_sdk/integrations/opentelemetry/consts.py b/sentry_sdk/integrations/opentelemetry/consts.py index 1585e8d893..d4b2b47768 100644 --- a/sentry_sdk/integrations/opentelemetry/consts.py +++ b/sentry_sdk/integrations/opentelemetry/consts.py @@ -12,9 +12,12 @@ SENTRY_USE_CURRENT_SCOPE_KEY = create_key("sentry_use_current_scope") SENTRY_USE_ISOLATION_SCOPE_KEY = create_key("sentry_use_isolation_scope") +# trace state keys TRACESTATE_SAMPLED_KEY = Baggage.SENTRY_PREFIX + "sampled" TRACESTATE_SAMPLE_RATE_KEY = Baggage.SENTRY_PREFIX + "sample_rate" +TRACESTATE_SAMPLE_RAND_KEY = Baggage.SENTRY_PREFIX + "sample_rand" +# misc OTEL_SENTRY_CONTEXT = "otel" SPAN_ORIGIN = "auto.otel" diff --git a/sentry_sdk/integrations/opentelemetry/sampler.py b/sentry_sdk/integrations/opentelemetry/sampler.py index a257f76f1e..83d647f1d8 100644 --- a/sentry_sdk/integrations/opentelemetry/sampler.py +++ b/sentry_sdk/integrations/opentelemetry/sampler.py @@ -1,4 +1,4 @@ -import random +from decimal import Decimal from typing import cast from opentelemetry import trace @@ -6,10 +6,14 @@ from opentelemetry.trace.span import TraceState import sentry_sdk -from sentry_sdk.tracing_utils import has_tracing_enabled +from sentry_sdk.tracing_utils import ( + _generate_sample_rand, + has_tracing_enabled, +) from sentry_sdk.utils import is_valid_sample_rate, logger from sentry_sdk.integrations.opentelemetry.consts import ( TRACESTATE_SAMPLED_KEY, + TRACESTATE_SAMPLE_RAND_KEY, TRACESTATE_SAMPLE_RATE_KEY, SentrySpanAttribute, ) @@ -70,23 +74,40 @@ def get_parent_sample_rate(parent_context, trace_id): return None -def dropped_result(parent_span_context, attributes, sample_rate=None): - # type: (SpanContext, Attributes, Optional[float]) -> SamplingResult - # these will only be added the first time in a root span sampling decision - # if sample_rate is provided, it'll be updated in trace state - trace_state = parent_span_context.trace_state +def get_parent_sample_rand(parent_context, trace_id): + # type: (Optional[SpanContext], int) -> Optional[Decimal] + if parent_context is None: + return None - if TRACESTATE_SAMPLED_KEY not in trace_state: - trace_state = trace_state.add(TRACESTATE_SAMPLED_KEY, "false") - elif trace_state.get(TRACESTATE_SAMPLED_KEY) == "deferred": - trace_state = trace_state.update(TRACESTATE_SAMPLED_KEY, "false") + is_span_context_valid = parent_context is not None and parent_context.is_valid - if sample_rate is not None: - trace_state = trace_state.update(TRACESTATE_SAMPLE_RATE_KEY, str(sample_rate)) + if is_span_context_valid and parent_context.trace_id == trace_id: + parent_sample_rand = parent_context.trace_state.get(TRACESTATE_SAMPLE_RAND_KEY) + if parent_sample_rand is None: + return None - is_root_span = not ( - parent_span_context.is_valid and not parent_span_context.is_remote + return Decimal(parent_sample_rand) + + return None + + +def dropped_result(span_context, attributes, sample_rate=None, sample_rand=None): + # type: (SpanContext, Attributes, Optional[float], Optional[Decimal]) -> SamplingResult + """ + React to a span getting unsampled and return a DROP SamplingResult. 
+ + Update the trace_state with the effective sampled, sample_rate and sample_rand, + record that we dropped the event for client report purposes, and return + an OTel SamplingResult with Decision.DROP. + + See for more info about OTel sampling: + https://opentelemetry-python.readthedocs.io/en/latest/sdk/trace.sampling.html + """ + trace_state = _update_trace_state( + span_context, sampled=False, sample_rate=sample_rate, sample_rand=sample_rand ) + + is_root_span = not (span_context.is_valid and not span_context.is_remote) if is_root_span: # Tell Sentry why we dropped the transaction/root-span client = sentry_sdk.get_client() @@ -108,19 +129,20 @@ def dropped_result(parent_span_context, attributes, sample_rate=None): ) -def sampled_result(span_context, attributes, sample_rate): - # type: (SpanContext, Attributes, Optional[float]) -> SamplingResult - # these will only be added the first time in a root span sampling decision - # if sample_rate is provided, it'll be updated in trace state - trace_state = span_context.trace_state +def sampled_result(span_context, attributes, sample_rate=None, sample_rand=None): + # type: (SpanContext, Attributes, Optional[float], Optional[Decimal]) -> SamplingResult + """ + React to a span being sampled and return a sampled SamplingResult. - if TRACESTATE_SAMPLED_KEY not in trace_state: - trace_state = trace_state.add(TRACESTATE_SAMPLED_KEY, "true") - elif trace_state.get(TRACESTATE_SAMPLED_KEY) == "deferred": - trace_state = trace_state.update(TRACESTATE_SAMPLED_KEY, "true") + Update the trace_state with the effective sampled, sample_rate and sample_rand, + and return an OTel SamplingResult with Decision.RECORD_AND_SAMPLE. - if sample_rate is not None: - trace_state = trace_state.update(TRACESTATE_SAMPLE_RATE_KEY, str(sample_rate)) + See for more info about OTel sampling: + https://opentelemetry-python.readthedocs.io/en/latest/sdk/trace.sampling.html + """ + trace_state = _update_trace_state( + span_context, sampled=True, sample_rate=sample_rate, sample_rand=sample_rand + ) return SamplingResult( Decision.RECORD_AND_SAMPLE, @@ -129,6 +151,27 @@ def sampled_result(span_context, attributes, sample_rate): ) +def _update_trace_state(span_context, sampled, sample_rate=None, sample_rand=None): + # type: (SpanContext, bool, Optional[float], Optional[Decimal]) -> TraceState + trace_state = span_context.trace_state + + sampled = "true" if sampled else "false" + if TRACESTATE_SAMPLED_KEY not in trace_state: + trace_state = trace_state.add(TRACESTATE_SAMPLED_KEY, sampled) + elif trace_state.get(TRACESTATE_SAMPLED_KEY) == "deferred": + trace_state = trace_state.update(TRACESTATE_SAMPLED_KEY, sampled) + + if sample_rate is not None: + trace_state = trace_state.update(TRACESTATE_SAMPLE_RATE_KEY, str(sample_rate)) + + if sample_rand is not None: + trace_state = trace_state.update( + TRACESTATE_SAMPLE_RAND_KEY, f"{sample_rand:.6f}" # noqa: E231 + ) + + return trace_state + + class SentrySampler(Sampler): def should_sample( self, @@ -156,6 +199,18 @@ def should_sample( sample_rate = None + parent_sampled = get_parent_sampled(parent_span_context, trace_id) + parent_sample_rate = get_parent_sample_rate(parent_span_context, trace_id) + parent_sample_rand = get_parent_sample_rand(parent_span_context, trace_id) + + if parent_sample_rand is not None: + # We have a sample_rand on the incoming trace or we already backfilled + # it in PropagationContext + sample_rand = parent_sample_rand + else: + # We are the head SDK and we need to generate a new sample_rand + sample_rand = 
cast(Decimal, _generate_sample_rand(str(trace_id), (0, 1))) + # Explicit sampled value provided at start_span custom_sampled = cast( "Optional[bool]", attributes.get(SentrySpanAttribute.CUSTOM_SAMPLED) @@ -165,11 +220,17 @@ def should_sample( sample_rate = float(custom_sampled) if sample_rate > 0: return sampled_result( - parent_span_context, attributes, sample_rate=sample_rate + parent_span_context, + attributes, + sample_rate=sample_rate, + sample_rand=sample_rand, ) else: return dropped_result( - parent_span_context, attributes, sample_rate=sample_rate + parent_span_context, + attributes, + sample_rate=sample_rate, + sample_rand=sample_rand, ) else: logger.debug( @@ -190,8 +251,6 @@ def should_sample( sample_rate_to_propagate = sample_rate else: # Check if there is a parent with a sampling decision - parent_sampled = get_parent_sampled(parent_span_context, trace_id) - parent_sample_rate = get_parent_sample_rate(parent_span_context, trace_id) if parent_sampled is not None: sample_rate = bool(parent_sampled) sample_rate_to_propagate = ( @@ -215,17 +274,23 @@ def should_sample( if client.monitor.downsample_factor > 0: sample_rate_to_propagate = sample_rate - # Roll the dice on sample rate + # Compare sample_rand to sample_rate to make the final sampling decision sample_rate = float(cast("Union[bool, float, int]", sample_rate)) - sampled = random.random() < sample_rate + sampled = sample_rand < sample_rate if sampled: return sampled_result( - parent_span_context, attributes, sample_rate=sample_rate_to_propagate + parent_span_context, + attributes, + sample_rate=sample_rate_to_propagate, + sample_rand=None if sample_rand == parent_sample_rand else sample_rand, ) else: return dropped_result( - parent_span_context, attributes, sample_rate=sample_rate_to_propagate + parent_span_context, + attributes, + sample_rate=sample_rate_to_propagate, + sample_rand=None if sample_rand == parent_sample_rand else sample_rand, ) def get_description(self) -> str: diff --git a/sentry_sdk/integrations/opentelemetry/scope.py b/sentry_sdk/integrations/opentelemetry/scope.py index 56dd129a7f..2cd734bcdd 100644 --- a/sentry_sdk/integrations/opentelemetry/scope.py +++ b/sentry_sdk/integrations/opentelemetry/scope.py @@ -30,7 +30,6 @@ from sentry_sdk.integrations.opentelemetry.utils import trace_state_from_baggage from sentry_sdk.scope import Scope, ScopeType from sentry_sdk.tracing import Span -from sentry_sdk.utils import logger from sentry_sdk._types import TYPE_CHECKING if TYPE_CHECKING: diff --git a/sentry_sdk/integrations/stdlib.py b/sentry_sdk/integrations/stdlib.py index 0812d31c67..49313bb0a5 100644 --- a/sentry_sdk/integrations/stdlib.py +++ b/sentry_sdk/integrations/stdlib.py @@ -73,7 +73,7 @@ def putrequest(self, method, url, *args, **kwargs): client = sentry_sdk.get_client() if client.get_integration(StdlibIntegration) is None or is_sentry_url( - client, f"{host}:{port}" + client, f"{host}:{port}" # noqa: E231 ): return real_putrequest(self, method, url, *args, **kwargs) diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 0e31ad4ff5..010f2a3d2a 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -20,7 +20,6 @@ from sentry_sdk.utils import ( _serialize_span_attribute, get_current_thread_meta, - logger, should_be_treated_as_error, ) diff --git a/sentry_sdk/tracing_utils.py b/sentry_sdk/tracing_utils.py index 07f8373c68..4bc7c6aeff 100644 --- a/sentry_sdk/tracing_utils.py +++ b/sentry_sdk/tracing_utils.py @@ -1,4 +1,5 @@ import contextlib +import decimal import inspect import os 
import re @@ -392,6 +393,9 @@ def from_incoming_data(cls, incoming_data): propagation_context = PropagationContext() propagation_context.update(sentrytrace_data) + if propagation_context is not None: + propagation_context._fill_sample_rand() + return propagation_context @property @@ -433,6 +437,78 @@ def update(self, other_dict): except AttributeError: pass + def _fill_sample_rand(self): + # type: () -> None + """ + Ensure that there is a valid sample_rand value in the baggage. + + If there is a valid sample_rand value in the baggage, we keep it. + Otherwise, we generate a sample_rand value according to the following: + + - If we have a parent_sampled value and a sample_rate in the DSC, we compute + a sample_rand value randomly in the range: + - [0, sample_rate) if parent_sampled is True, + - or, in the range [sample_rate, 1) if parent_sampled is False. + + - If either parent_sampled or sample_rate is missing, we generate a random + value in the range [0, 1). + + The sample_rand is deterministically generated from the trace_id, if present. + + This function does nothing if there is no dynamic_sampling_context. + """ + if self.dynamic_sampling_context is None or self.baggage is None: + return + + sentry_baggage = self.baggage.sentry_items + + sample_rand = None + if sentry_baggage.get("sample_rand"): + try: + sample_rand = Decimal(sentry_baggage["sample_rand"]) + except Exception: + logger.debug( + f"Failed to convert incoming sample_rand to Decimal: {sample_rand}" + ) + + if sample_rand is not None and 0 <= sample_rand < 1: + # sample_rand is present and valid, so don't overwrite it + return + + sample_rate = None + if sentry_baggage.get("sample_rate"): + try: + sample_rate = float(sentry_baggage["sample_rate"]) + except Exception: + logger.debug( + f"Failed to convert incoming sample_rate to float: {sample_rate}" + ) + + lower, upper = _sample_rand_range(self.parent_sampled, sample_rate) + + try: + sample_rand = _generate_sample_rand(self.trace_id, interval=(lower, upper)) + except ValueError: + # ValueError is raised if the interval is invalid, i.e. lower >= upper. + # lower >= upper might happen if the incoming trace's sampled flag + # and sample_rate are inconsistent, e.g. sample_rate=0.0 but sampled=True. + # We cannot generate a sensible sample_rand value in this case. + logger.debug( + f"Could not backfill sample_rand, since parent_sampled={self.parent_sampled} " + f"and sample_rate={sample_rate}." + ) + return + + self.baggage.sentry_items["sample_rand"] = f"{sample_rand:.6f}" # noqa: E231 + + def _sample_rand(self): + # type: () -> Optional[str] + """Convenience method to get the sample_rand value from the baggage.""" + if self.baggage is None: + return None + + return self.baggage.sentry_items.get("sample_rand") + def __repr__(self): # type: (...) -> str return "".format( @@ -684,13 +760,11 @@ def get_current_span(scope=None): return current_span -# XXX-potel-ivana: use this def _generate_sample_rand( trace_id, # type: Optional[str] - *, interval=(0.0, 1.0), # type: tuple[float, float] ): - # type: (...) -> Any + # type: (...) -> Optional[decimal.Decimal] """Generate a sample_rand value from a trace ID. The generated value will be pseudorandomly chosen from the provided @@ -709,15 +783,11 @@ def _generate_sample_rand( while sample_rand >= upper: sample_rand = rng.uniform(lower, upper) - # Round down to exactly six decimal-digit precision. - # Setting the context is needed to avoid an InvalidOperation exception - # in case the user has changed the default precision. 
return Decimal(sample_rand).quantize( Decimal("0.000001"), rounding=ROUND_DOWN, context=Context(prec=6) ) -# XXX-potel-ivana: use this def _sample_rand_range(parent_sampled, sample_rate): # type: (Optional[bool], Optional[float]) -> tuple[float, float] """ diff --git a/tests/integrations/opentelemetry/test_propagator.py b/tests/integrations/opentelemetry/test_propagator.py index 46f4250771..b18e3bc400 100644 --- a/tests/integrations/opentelemetry/test_propagator.py +++ b/tests/integrations/opentelemetry/test_propagator.py @@ -1,6 +1,6 @@ -import pytest +from unittest.mock import MagicMock, patch -from unittest.mock import MagicMock +import pytest from opentelemetry.trace.propagation import get_current_span from opentelemetry.propagators.textmap import DefaultSetter @@ -139,11 +139,47 @@ def test_inject_continue_trace(sentry_init): "HTTP_BAGGAGE": baggage, } + expected_baggage = baggage + ",sentry-sample_rand=0.001111" + + with patch( + "sentry_sdk.tracing_utils.Random.uniform", + return_value=0.001111, + ): + with sentry_sdk.continue_trace(incoming_headers): + with sentry_sdk.start_span(name="foo") as span: + SentryPropagator().inject(carrier, setter=setter) + assert carrier["sentry-trace"] == f"{trace_id}-{span.span_id}-1" + assert carrier["baggage"] == SortedBaggage(expected_baggage) + + +def test_inject_continue_trace_incoming_sample_rand(sentry_init): + sentry_init(traces_sample_rate=1.0) + + carrier = {} + setter = DefaultSetter() + + trace_id = "771a43a4192642f0b136d5159a501700" + sentry_trace = "771a43a4192642f0b136d5159a501700-1234567890abcdef-1" + baggage = ( + "sentry-trace_id=771a43a4192642f0b136d5159a501700," + "sentry-public_key=frontendpublickey," + "sentry-sample_rate=0.01337," + "sentry-sampled=true," + "sentry-release=myfrontend," + "sentry-environment=bird," + "sentry-transaction=bar," + "sentry-sample_rand=0.002849" + ) + incoming_headers = { + "HTTP_SENTRY_TRACE": sentry_trace, + "HTTP_BAGGAGE": baggage, + } + with sentry_sdk.continue_trace(incoming_headers): with sentry_sdk.start_span(name="foo") as span: SentryPropagator().inject(carrier, setter=setter) - assert (carrier["sentry-trace"]) == f"{trace_id}-{span.span_id}-1" - assert (carrier["baggage"]) == SortedBaggage(baggage) + assert carrier["sentry-trace"] == f"{trace_id}-{span.span_id}-1" + assert carrier["baggage"] == SortedBaggage(baggage) def test_inject_head_sdk(sentry_init): @@ -152,9 +188,23 @@ def test_inject_head_sdk(sentry_init): carrier = {} setter = DefaultSetter() - with sentry_sdk.start_span(name="foo") as span: - SentryPropagator().inject(carrier, setter=setter) - assert (carrier["sentry-trace"]) == f"{span.trace_id}-{span.span_id}-1" - assert (carrier["baggage"]) == SortedBaggage( - f"sentry-transaction=foo,sentry-release=release,sentry-environment=production,sentry-trace_id={span.trace_id},sentry-sample_rate=1.0,sentry-sampled=true" # noqa: E231 - ) + expected_baggage = ( + "sentry-transaction=foo," + "sentry-release=release," + "sentry-environment=production," + "sentry-trace_id={trace_id}," + "sentry-sample_rate=1.0," + "sentry-sampled=true," + "sentry-sample_rand=0.111111" + ) + + with patch( + "sentry_sdk.tracing_utils.Random.uniform", + return_value=0.111111, + ): + with sentry_sdk.start_span(name="foo") as span: + SentryPropagator().inject(carrier, setter=setter) + assert carrier["sentry-trace"] == f"{span.trace_id}-{span.span_id}-1" + assert carrier["baggage"] == SortedBaggage( + expected_baggage.format(trace_id=span.trace_id) + ) diff --git a/tests/integrations/opentelemetry/test_sampler.py 
b/tests/integrations/opentelemetry/test_sampler.py index 9e67eb7921..8cccab05be 100644 --- a/tests/integrations/opentelemetry/test_sampler.py +++ b/tests/integrations/opentelemetry/test_sampler.py @@ -71,13 +71,17 @@ def test_sampling_traces_sample_rate_50(sentry_init, capture_envelopes): envelopes = capture_envelopes() - with mock.patch("random.random", return_value=0.2): # drop + with mock.patch( + "sentry_sdk.tracing_utils.Random.uniform", return_value=0.2 + ): # drop with sentry_sdk.start_span(description="request a"): with sentry_sdk.start_span(description="cache a"): with sentry_sdk.start_span(description="db a"): ... - with mock.patch("random.random", return_value=0.7): # keep + with mock.patch( + "sentry_sdk.tracing_utils.Random.uniform", return_value=0.7 + ): # keep with sentry_sdk.start_span(description="request b"): with sentry_sdk.start_span(description="cache b"): with sentry_sdk.start_span(description="db b"): diff --git a/tests/integrations/stdlib/test_httplib.py b/tests/integrations/stdlib/test_httplib.py index baf12ca7d2..5f6d57998b 100644 --- a/tests/integrations/stdlib/test_httplib.py +++ b/tests/integrations/stdlib/test_httplib.py @@ -206,7 +206,7 @@ def test_outgoing_trace_headers( "baggage": ( "other-vendor-value-1=foo;bar;baz, sentry-trace_id=771a43a4192642f0b136d5159a501700, " "sentry-public_key=49d0f7386ad645858ae85020e393bef3, sentry-sample_rate=0.01337, " - "sentry-user_id=Am%C3%A9lie, sentry-sample_rand=0.132521102938283, other-vendor-value-2=foo;bar;" + "sentry-user_id=Am%C3%A9lie, sentry-sample_rand=0.003370, other-vendor-value-2=foo;bar;" ), } @@ -231,9 +231,10 @@ def test_outgoing_trace_headers( expected_outgoing_baggage = ( "sentry-trace_id=771a43a4192642f0b136d5159a501700," "sentry-public_key=49d0f7386ad645858ae85020e393bef3," - "sentry-sample_rate=1.0," + "sentry-sample_rate=0.01337," "sentry-user_id=Am%C3%A9lie," - "sentry-sample_rand=0.132521102938283" + "sentry-sample_rand=0.003370," + "sentry-sampled=true" ) assert request_headers["baggage"] == SortedBaggage(expected_outgoing_baggage) diff --git a/tests/test_api.py b/tests/test_api.py index 612a092a73..ae88791f96 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1,6 +1,5 @@ import pytest -import re from unittest import mock from sentry_sdk import ( @@ -94,11 +93,10 @@ def test_baggage_with_tracing_disabled(sentry_init): @pytest.mark.forked def test_baggage_with_tracing_enabled(sentry_init): sentry_init(traces_sample_rate=1.0, release="1.0.0", environment="dev") - with start_span(name="foo") as span: - expected_baggage_re = r"^sentry-transaction=foo,sentry-trace_id={},sentry-sample_rand=0\.\d{{6}},sentry-environment=dev,sentry-release=1\.0\.0,sentry-sample_rate=1\.0,sentry-sampled={}$".format( - span.trace_id, "true" if span.sampled else "false" - ) - assert re.match(expected_baggage_re, get_baggage()) + with mock.patch("sentry_sdk.tracing_utils.Random.uniform", return_value=0.111111): + with start_span(name="foo") as span: + expected_baggage = f"sentry-transaction=foo,sentry-trace_id={span.trace_id},sentry-sample_rand=0.111111,sentry-environment=dev,sentry-release=1.0.0,sentry-sample_rate=1.0,sentry-sampled=true" # noqa: E231 + assert get_baggage() == SortedBaggage(expected_baggage) @pytest.mark.forked @@ -112,7 +110,7 @@ def test_continue_trace(sentry_init): with continue_trace( { "sentry-trace": "{}-{}-{}".format(trace_id, parent_span_id, parent_sampled), - "baggage": "sentry-trace_id=566e3688a61d4bc888951642d6f14a19,sentry-sample_rand=0.123456", + "baggage": 
"sentry-trace_id=566e3688a61d4bc888951642d6f14a19,sentry-sample_rand=0.123456", # noqa: E231 }, ): with start_span(name="some name") as span: diff --git a/tests/test_dsc.py b/tests/test_dsc.py index 9698bcd8d0..ea3c0b8988 100644 --- a/tests/test_dsc.py +++ b/tests/test_dsc.py @@ -8,7 +8,6 @@ This is not tested in this file. """ -import random from unittest import mock import pytest @@ -176,10 +175,10 @@ def my_traces_sampler(sampling_context): } # We continue the incoming trace and start a new transaction - monkeypatch.setattr(random, "random", lambda: 0.125) - with sentry_sdk.continue_trace(incoming_http_headers): - with sentry_sdk.start_span(name="foo"): - pass + with mock.patch("sentry_sdk.tracing_utils.Random.uniform", return_value=0.125): + with sentry_sdk.continue_trace(incoming_http_headers): + with sentry_sdk.start_span(name="foo"): + pass assert len(envelopes) == 1 @@ -231,7 +230,7 @@ def my_traces_sampler(sampling_context): # The result of the local traces sampler. # "local_traces_sample_rate": # The `traces_sample_rate` setting in the local `sentry_init` call. - ( + ( # 1 traces_sample_rate does not override incoming { "incoming_sample_rate": 1.0, "incoming_sampled": "true", @@ -243,7 +242,7 @@ def my_traces_sampler(sampling_context): 1.0, # expected_sample_rate "true", # expected_sampled ), - ( + ( # 2 traces_sampler overrides incoming { "incoming_sample_rate": 1.0, "incoming_sampled": "true", @@ -255,7 +254,7 @@ def my_traces_sampler(sampling_context): 0.5, # expected_sample_rate "true", # expected_sampled ), - ( + ( # 3 traces_sample_rate does not overrides incoming sample rate or parent (incoming not sampled) { "incoming_sample_rate": 1.0, "incoming_sampled": "false", @@ -267,19 +266,19 @@ def my_traces_sampler(sampling_context): None, # expected_sample_rate "tracing-disabled-no-transactions-should-be-sent", # expected_sampled (because the parent sampled is 0) ), - ( + ( # 4 traces_sampler overrides incoming (incoming not sampled) { - "incoming_sample_rate": 1.0, + "incoming_sample_rate": 0.3, "incoming_sampled": "false", "sentry_trace_header_parent_sampled": 0, "use_local_traces_sampler": True, - "local_traces_sampler_result": 0.5, + "local_traces_sampler_result": 0.25, "local_traces_sample_rate": 0.7, }, - 0.5, # expected_sample_rate + 0.25, # expected_sample_rate "false", # expected_sampled (traces sampler can override parent sampled) ), - ( + ( # 5 forwarding incoming (traces_sample_rate not set) { "incoming_sample_rate": 1.0, "incoming_sampled": "true", @@ -291,7 +290,7 @@ def my_traces_sampler(sampling_context): None, # expected_sample_rate "tracing-disabled-no-transactions-should-be-sent", # expected_sampled (traces_sample_rate=None disables all transaction creation) ), - ( + ( # 6 traces_sampler overrides incoming (traces_sample_rate not set) { "incoming_sample_rate": 1.0, "incoming_sampled": "true", @@ -303,7 +302,7 @@ def my_traces_sampler(sampling_context): 0.5, # expected_sample_rate "true", # expected_sampled (traces sampler overrides the traces_sample_rate setting, so transactions are created) ), - ( + ( # 7 forwarding incoming (traces_sample_rate not set) (incoming not sampled) { "incoming_sample_rate": 1.0, "incoming_sampled": "false", @@ -315,19 +314,19 @@ def my_traces_sampler(sampling_context): None, # expected_sample_rate "tracing-disabled-no-transactions-should-be-sent", # expected_sampled (traces_sample_rate=None disables all transaction creation) ), - ( + ( # 8 traces_sampler overrides incoming (traces_sample_rate not set) (incoming not sampled) { - 
"incoming_sample_rate": 1.0, + "incoming_sample_rate": 0.3, "incoming_sampled": "false", "sentry_trace_header_parent_sampled": 0, "use_local_traces_sampler": True, - "local_traces_sampler_result": 0.5, + "local_traces_sampler_result": 0.25, "local_traces_sample_rate": None, }, - 0.5, # expected_sample_rate + 0.25, # expected_sample_rate "false", # expected_sampled ), - ( + ( # 9 traces_sample_rate overrides incoming (upstream deferred sampling decision) { "incoming_sample_rate": 1.0, "incoming_sampled": None, @@ -405,7 +404,7 @@ def my_traces_sampler(sampling_context): } # We continue the incoming trace and start a new transaction - with mock.patch.object(random, "random", return_value=0.2): + with mock.patch("sentry_sdk.tracing_utils.Random.uniform", return_value=0.2): with sentry_sdk.continue_trace(incoming_http_headers): with sentry_sdk.start_span(name="foo"): pass @@ -413,6 +412,7 @@ def my_traces_sampler(sampling_context): if expected_sampled == "tracing-disabled-no-transactions-should-be-sent": assert len(envelopes) == 0 else: + assert len(envelopes) == 1 transaction_envelope = envelopes[0] dsc_in_envelope_header = transaction_envelope.headers["trace"] diff --git a/tests/test_propagationcontext.py b/tests/test_propagationcontext.py index c8749027e4..797a18cecd 100644 --- a/tests/test_propagationcontext.py +++ b/tests/test_propagationcontext.py @@ -104,11 +104,11 @@ def test_update(): def test_existing_sample_rand_kept(): ctx = PropagationContext( trace_id="00000000000000000000000000000000", - dynamic_sampling_context={"sample_rand": "0.5"}, + baggage=Baggage(sentry_items={"sample_rand": "0.5"}), ) - # If sample_rand was regenerated, the value would be 0.919221 based on the trace_id assert ctx.dynamic_sampling_context["sample_rand"] == "0.5" + assert ctx.baggage.sentry_items["sample_rand"] == "0.5" @pytest.mark.parametrize( @@ -158,7 +158,7 @@ def mock_random_class(seed): ) assert ( - ctx.dynamic_sampling_context["sample_rand"] + ctx.dynamic_sampling_context.get("sample_rand") == f"{expected_interval[0]:.6f}" # noqa: E231 ) assert mock_uniform.call_count == 1 diff --git a/tests/tracing/test_integration_tests.py b/tests/tracing/test_integration_tests.py index e6e436dbd2..df6cf57e29 100644 --- a/tests/tracing/test_integration_tests.py +++ b/tests/tracing/test_integration_tests.py @@ -227,7 +227,7 @@ def test_trace_propagation_meta_head_sdk(sentry_init): assert 'meta name="baggage"' in baggage baggage_content = re.findall('content="([^"]*)"', baggage)[0] - assert baggage_content == root_span.get_baggage().serialize() + assert SortedBaggage(baggage_content) == root_span.get_baggage().serialize() @pytest.mark.parametrize( diff --git a/tests/tracing/test_sample_rand.py b/tests/tracing/test_sample_rand.py index fc7d0e2404..38a0fe05a2 100644 --- a/tests/tracing/test_sample_rand.py +++ b/tests/tracing/test_sample_rand.py @@ -4,7 +4,7 @@ import pytest import sentry_sdk -from sentry_sdk.tracing_utils import Baggage +from sentry_sdk.tracing import BAGGAGE_HEADER_NAME, SENTRY_TRACE_HEADER_NAME @pytest.mark.parametrize("sample_rand", (0.0, 0.25, 0.5, 0.75)) @@ -40,16 +40,20 @@ def test_transaction_uses_incoming_sample_rand( """ Test that the transaction uses the sample_rand value from the incoming baggage. 
""" - baggage = Baggage(sentry_items={"sample_rand": f"{sample_rand:.6f}"}) # noqa: E231 - sentry_init(traces_sample_rate=sample_rate) events = capture_events() - with sentry_sdk.start_span(baggage=baggage) as root_span: - assert ( - root_span.get_baggage().sentry_items["sample_rand"] - == f"{sample_rand:.6f}" # noqa: E231 - ) + baggage = f"sentry-sample_rand={sample_rand:.6f},sentry-trace_id=771a43a4192642f0b136d5159a501700" # noqa: E231 + sentry_trace = "771a43a4192642f0b136d5159a501700-1234567890abcdef" + + with sentry_sdk.continue_trace( + {BAGGAGE_HEADER_NAME: baggage, SENTRY_TRACE_HEADER_NAME: sentry_trace} + ): + with sentry_sdk.start_span() as root_span: + assert ( + root_span.get_baggage().sentry_items["sample_rand"] + == f"{sample_rand:.6f}" # noqa: E231 + ) # Transaction event captured if sample_rand < sample_rate, indicating that # sample_rand is used to make the sampling decision. @@ -71,11 +75,93 @@ def test_decimal_context(sentry_init, capture_events): with mock.patch( "sentry_sdk.tracing_utils.Random.uniform", return_value=0.123456789 ): - with sentry_sdk.start_transaction() as transaction: - assert ( - transaction.get_baggage().sentry_items["sample_rand"] == "0.123456" - ) + with sentry_sdk.start_span() as root_span: + assert root_span.get_baggage().sentry_items["sample_rand"] == "0.123456" finally: decimal.getcontext().prec = old_prec assert len(events) == 1 + + +@pytest.mark.parametrize( + "incoming_sample_rand,expected_sample_rand", + ( + ("0.0100015", "0.0100015"), + ("0.1", "0.1"), + ), +) +def test_unexpected_incoming_sample_rand_precision( + sentry_init, capture_events, incoming_sample_rand, expected_sample_rand +): + """ + Test that incoming sample_rand is correctly interpreted even if it looks unexpected. + + We shouldn't be getting arbitrary precision sample_rand in incoming headers, + but if we do for some reason, check that we don't tamper with it. + """ + sentry_init(traces_sample_rate=1.0) + events = capture_events() + + baggage = f"sentry-sample_rand={incoming_sample_rand},sentry-trace_id=771a43a4192642f0b136d5159a501700" # noqa: E231 + sentry_trace = "771a43a4192642f0b136d5159a501700-1234567890abcdef" + + with sentry_sdk.continue_trace( + {BAGGAGE_HEADER_NAME: baggage, SENTRY_TRACE_HEADER_NAME: sentry_trace} + ): + with sentry_sdk.start_span() as root_span: + assert ( + root_span.get_baggage().sentry_items["sample_rand"] + == expected_sample_rand + ) + + assert len(events) == 1 + + +@pytest.mark.parametrize( + "incoming_sample_rand", + ("abc", "null", "47"), +) +def test_invalid_incoming_sample_rand(sentry_init, incoming_sample_rand): + """Test that we handle malformed incoming sample_rand.""" + sentry_init(traces_sample_rate=1.0) + + baggage = f"sentry-sample_rand={incoming_sample_rand},sentry-trace_id=771a43a4192642f0b136d5159a501700" # noqa: E231 + sentry_trace = "771a43a4192642f0b136d5159a501700-1234567890abcdef" + + with sentry_sdk.continue_trace( + {BAGGAGE_HEADER_NAME: baggage, SENTRY_TRACE_HEADER_NAME: sentry_trace} + ): + with sentry_sdk.start_span(): + pass + + # The behavior here is undefined since we got a broken incoming trace, + # so as long as the SDK doesn't produce an error we consider this + # testcase a success. + + +@pytest.mark.parametrize("incoming", ((0.0, "true"), (1.0, "false"))) +def test_invalid_incoming_sampled_and_sample_rate(sentry_init, incoming): + """ + Test that we don't error out in case we can't generate a sample_rand that + would respect the incoming sampled and sample_rate. 
+ """ + sentry_init(traces_sample_rate=1.0) + + sample_rate, sampled = incoming + + baggage = ( + f"sentry-sample_rate={sample_rate}," # noqa: E231 + f"sentry-sampled={sampled}," # noqa: E231 + "sentry-trace_id=771a43a4192642f0b136d5159a501700" + ) + sentry_trace = f"771a43a4192642f0b136d5159a501700-1234567890abcdef-{1 if sampled == 'true' else 0}" + + with sentry_sdk.continue_trace( + {BAGGAGE_HEADER_NAME: baggage, SENTRY_TRACE_HEADER_NAME: sentry_trace} + ): + with sentry_sdk.start_span(): + pass + + # The behavior here is undefined since we got a broken incoming trace, + # so as long as the SDK doesn't produce an error we consider this + # testcase a success. diff --git a/tests/tracing/test_sample_rand_propagation.py b/tests/tracing/test_sample_rand_propagation.py index ea3ea548ff..f598b24154 100644 --- a/tests/tracing/test_sample_rand_propagation.py +++ b/tests/tracing/test_sample_rand_propagation.py @@ -7,37 +7,38 @@ """ from unittest import mock -from unittest.mock import Mock import sentry_sdk -def test_continue_trace_with_sample_rand(): +def test_continue_trace_with_sample_rand(sentry_init): """ Test that an incoming sample_rand is propagated onto the transaction's baggage. """ + sentry_init() + headers = { "sentry-trace": "00000000000000000000000000000000-0000000000000000-0", "baggage": "sentry-sample_rand=0.1,sentry-sample_rate=0.5", } - transaction = sentry_sdk.continue_trace(headers) - assert transaction.get_baggage().sentry_items["sample_rand"] == "0.1" + with sentry_sdk.continue_trace(headers): + with sentry_sdk.start_span(name="root-span") as root_span: + assert root_span.get_baggage().sentry_items["sample_rand"] == "0.1" -def test_continue_trace_missing_sample_rand(): +def test_continue_trace_missing_sample_rand(sentry_init): """ Test that a missing sample_rand is filled in onto the transaction's baggage. 
""" + sentry_init() headers = { "sentry-trace": "00000000000000000000000000000000-0000000000000000", "baggage": "sentry-placeholder=asdf", } - mock_uniform = Mock(return_value=0.5) - - with mock.patch("sentry_sdk.tracing_utils.Random.uniform", mock_uniform): - transaction = sentry_sdk.continue_trace(headers) - - assert transaction.get_baggage().sentry_items["sample_rand"] == "0.500000" + with mock.patch("sentry_sdk.tracing_utils.Random.uniform", return_value=0.5): + with sentry_sdk.continue_trace(headers): + with sentry_sdk.start_span(name="root-span") as root_span: + assert root_span.get_baggage().sentry_items["sample_rand"] == "0.500000" diff --git a/tests/tracing/test_sampling.py b/tests/tracing/test_sampling.py index b418e5a572..59780729b7 100644 --- a/tests/tracing/test_sampling.py +++ b/tests/tracing/test_sampling.py @@ -6,7 +6,7 @@ import sentry_sdk from sentry_sdk import start_span, capture_exception -from sentry_sdk.tracing_utils import Baggage +from sentry_sdk.tracing import BAGGAGE_HEADER_NAME, SENTRY_TRACE_HEADER_NAME from sentry_sdk.utils import logger @@ -59,9 +59,14 @@ def test_uses_traces_sample_rate_correctly( ): sentry_init(traces_sample_rate=traces_sample_rate) - baggage = Baggage(sentry_items={"sample_rand": "0.500000"}) - root_span = start_span(name="dogpark", baggage=baggage) - assert root_span.sampled is expected_decision + with sentry_sdk.continue_trace( + { + BAGGAGE_HEADER_NAME: "sentry-sample_rand=0.500000,sentry-trace_id=397f36434d07b20135324b2e6ae70c77", + SENTRY_TRACE_HEADER_NAME: "397f36434d07b20135324b2e6ae70c77-1234567890abcdef", + } + ): + with start_span(name="dogpark") as root_span: + assert root_span.sampled is expected_decision @pytest.mark.parametrize( @@ -75,9 +80,14 @@ def test_uses_traces_sampler_return_value_correctly( ): sentry_init(traces_sampler=mock.Mock(return_value=traces_sampler_return_value)) - baggage = Baggage(sentry_items={"sample_rand": "0.500000"}) - root_span = start_span(name="dogpark", baggage=baggage) - assert root_span.sampled is expected_decision + with sentry_sdk.continue_trace( + { + BAGGAGE_HEADER_NAME: "sentry-sample_rand=0.500000,sentry-trace_id=397f36434d07b20135324b2e6ae70c77", + SENTRY_TRACE_HEADER_NAME: "397f36434d07b20135324b2e6ae70c77-1234567890abcdef", + } + ): + with start_span(name="dogpark") as root_span: + assert root_span.sampled is expected_decision @pytest.mark.parametrize("traces_sampler_return_value", [True, False]) From ae2b5245897c0e33227188373a0af30a8210095b Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Fri, 28 Mar 2025 14:13:36 +0100 Subject: [PATCH 202/244] Recreate test matrix --- .github/workflows/test-integrations-ai.yml | 2 -- .github/workflows/test-integrations-cloud.yml | 6 ------ .github/workflows/test-integrations-common.yml | 1 - .github/workflows/test-integrations-flags.yml | 1 - .github/workflows/test-integrations-gevent.yml | 1 - .github/workflows/test-integrations-graphql.yml | 1 - .github/workflows/test-integrations-misc.yml | 1 - .github/workflows/test-integrations-network.yml | 2 -- .github/workflows/test-integrations-tasks.yml | 2 -- .github/workflows/test-integrations-web-1.yml | 10 ++-------- .github/workflows/test-integrations-web-2.yml | 2 -- .../split_tox_gh_actions/templates/test_group.jinja | 5 +---- 12 files changed, 3 insertions(+), 31 deletions(-) diff --git a/.github/workflows/test-integrations-ai.yml b/.github/workflows/test-integrations-ai.yml index 632119d9f4..f98feeadd4 100644 --- a/.github/workflows/test-integrations-ai.yml +++ 
b/.github/workflows/test-integrations-ai.yml @@ -34,7 +34,6 @@ jobs: steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 - if: ${{ matrix.python-version != '3.6' }} with: python-version: ${{ matrix.python-version }} allow-prereleases: true @@ -97,7 +96,6 @@ jobs: steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 - if: ${{ matrix.python-version != '3.6' }} with: python-version: ${{ matrix.python-version }} allow-prereleases: true diff --git a/.github/workflows/test-integrations-cloud.yml b/.github/workflows/test-integrations-cloud.yml index e1b5bda21e..24c1bf7838 100644 --- a/.github/workflows/test-integrations-cloud.yml +++ b/.github/workflows/test-integrations-cloud.yml @@ -35,12 +35,9 @@ jobs: docker: image: docker:dind # Required for Docker network management options: --privileged # Required for Docker-in-Docker operations - # Use Docker container only for Python 3.6 - container: ${{ matrix.python-version == '3.6' && 'python:3.6' || null }} steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 - if: ${{ matrix.python-version != '3.6' }} with: python-version: ${{ matrix.python-version }} allow-prereleases: true @@ -104,12 +101,9 @@ jobs: docker: image: docker:dind # Required for Docker network management options: --privileged # Required for Docker-in-Docker operations - # Use Docker container only for Python 3.6 - container: ${{ matrix.python-version == '3.6' && 'python:3.6' || null }} steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 - if: ${{ matrix.python-version != '3.6' }} with: python-version: ${{ matrix.python-version }} allow-prereleases: true diff --git a/.github/workflows/test-integrations-common.yml b/.github/workflows/test-integrations-common.yml index f525916d18..9b13276313 100644 --- a/.github/workflows/test-integrations-common.yml +++ b/.github/workflows/test-integrations-common.yml @@ -34,7 +34,6 @@ jobs: steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 - if: ${{ matrix.python-version != '3.6' }} with: python-version: ${{ matrix.python-version }} allow-prereleases: true diff --git a/.github/workflows/test-integrations-flags.yml b/.github/workflows/test-integrations-flags.yml index 1fa6eef844..cc4580cc22 100644 --- a/.github/workflows/test-integrations-flags.yml +++ b/.github/workflows/test-integrations-flags.yml @@ -34,7 +34,6 @@ jobs: steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 - if: ${{ matrix.python-version != '3.6' }} with: python-version: ${{ matrix.python-version }} allow-prereleases: true diff --git a/.github/workflows/test-integrations-gevent.yml b/.github/workflows/test-integrations-gevent.yml index 81f1c75c41..ee48fe4d95 100644 --- a/.github/workflows/test-integrations-gevent.yml +++ b/.github/workflows/test-integrations-gevent.yml @@ -34,7 +34,6 @@ jobs: steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 - if: ${{ matrix.python-version != '3.6' }} with: python-version: ${{ matrix.python-version }} allow-prereleases: true diff --git a/.github/workflows/test-integrations-graphql.yml b/.github/workflows/test-integrations-graphql.yml index c3375a8550..09890d7662 100644 --- a/.github/workflows/test-integrations-graphql.yml +++ b/.github/workflows/test-integrations-graphql.yml @@ -34,7 +34,6 @@ jobs: steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 - if: ${{ matrix.python-version != '3.6' }} with: python-version: ${{ matrix.python-version }} allow-prereleases: true diff --git 
a/.github/workflows/test-integrations-misc.yml b/.github/workflows/test-integrations-misc.yml index a152111d8c..b9c347933c 100644 --- a/.github/workflows/test-integrations-misc.yml +++ b/.github/workflows/test-integrations-misc.yml @@ -34,7 +34,6 @@ jobs: steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 - if: ${{ matrix.python-version != '3.6' }} with: python-version: ${{ matrix.python-version }} allow-prereleases: true diff --git a/.github/workflows/test-integrations-network.yml b/.github/workflows/test-integrations-network.yml index 93f1ab1bca..c31f35758a 100644 --- a/.github/workflows/test-integrations-network.yml +++ b/.github/workflows/test-integrations-network.yml @@ -34,7 +34,6 @@ jobs: steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 - if: ${{ matrix.python-version != '3.6' }} with: python-version: ${{ matrix.python-version }} allow-prereleases: true @@ -89,7 +88,6 @@ jobs: steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 - if: ${{ matrix.python-version != '3.6' }} with: python-version: ${{ matrix.python-version }} allow-prereleases: true diff --git a/.github/workflows/test-integrations-tasks.yml b/.github/workflows/test-integrations-tasks.yml index 23afa43b1a..3c80bd7146 100644 --- a/.github/workflows/test-integrations-tasks.yml +++ b/.github/workflows/test-integrations-tasks.yml @@ -34,7 +34,6 @@ jobs: steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 - if: ${{ matrix.python-version != '3.6' }} with: python-version: ${{ matrix.python-version }} allow-prereleases: true @@ -111,7 +110,6 @@ jobs: steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 - if: ${{ matrix.python-version != '3.6' }} with: python-version: ${{ matrix.python-version }} allow-prereleases: true diff --git a/.github/workflows/test-integrations-web-1.yml b/.github/workflows/test-integrations-web-1.yml index 9b6e3fe00a..a43112be8f 100644 --- a/.github/workflows/test-integrations-web-1.yml +++ b/.github/workflows/test-integrations-web-1.yml @@ -46,15 +46,12 @@ jobs: ports: - 5432:5432 env: - SENTRY_PYTHON_TEST_POSTGRES_HOST: ${{ matrix.python-version == '3.6' && 'postgres' || 'localhost' }} + SENTRY_PYTHON_TEST_POSTGRES_HOST: localhost SENTRY_PYTHON_TEST_POSTGRES_USER: postgres SENTRY_PYTHON_TEST_POSTGRES_PASSWORD: sentry - # Use Docker container only for Python 3.6 - container: ${{ matrix.python-version == '3.6' && 'python:3.6' || null }} steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 - if: ${{ matrix.python-version != '3.6' }} with: python-version: ${{ matrix.python-version }} allow-prereleases: true @@ -125,15 +122,12 @@ jobs: ports: - 5432:5432 env: - SENTRY_PYTHON_TEST_POSTGRES_HOST: ${{ matrix.python-version == '3.6' && 'postgres' || 'localhost' }} + SENTRY_PYTHON_TEST_POSTGRES_HOST: localhost SENTRY_PYTHON_TEST_POSTGRES_USER: postgres SENTRY_PYTHON_TEST_POSTGRES_PASSWORD: sentry - # Use Docker container only for Python 3.6 - container: ${{ matrix.python-version == '3.6' && 'python:3.6' || null }} steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 - if: ${{ matrix.python-version != '3.6' }} with: python-version: ${{ matrix.python-version }} allow-prereleases: true diff --git a/.github/workflows/test-integrations-web-2.yml b/.github/workflows/test-integrations-web-2.yml index c17400e92b..70bf07bdb5 100644 --- a/.github/workflows/test-integrations-web-2.yml +++ b/.github/workflows/test-integrations-web-2.yml @@ -34,7 +34,6 @@ jobs: steps: - uses: 
actions/checkout@v4.2.2 - uses: actions/setup-python@v5 - if: ${{ matrix.python-version != '3.6' }} with: python-version: ${{ matrix.python-version }} allow-prereleases: true @@ -117,7 +116,6 @@ jobs: steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 - if: ${{ matrix.python-version != '3.6' }} with: python-version: ${{ matrix.python-version }} allow-prereleases: true diff --git a/scripts/split_tox_gh_actions/templates/test_group.jinja b/scripts/split_tox_gh_actions/templates/test_group.jinja index cfe5520fc6..01938ed6c6 100644 --- a/scripts/split_tox_gh_actions/templates/test_group.jinja +++ b/scripts/split_tox_gh_actions/templates/test_group.jinja @@ -30,17 +30,14 @@ ports: - 5432:5432 env: - SENTRY_PYTHON_TEST_POSTGRES_HOST: {% raw %}${{ matrix.python-version == '3.6' && 'postgres' || 'localhost' }}{% endraw %} + SENTRY_PYTHON_TEST_POSTGRES_HOST: localhost SENTRY_PYTHON_TEST_POSTGRES_USER: postgres SENTRY_PYTHON_TEST_POSTGRES_PASSWORD: sentry {% endif %} - # Use Docker container only for Python 3.6 - {% raw %}container: ${{ matrix.python-version == '3.6' && 'python:3.6' || null }}{% endraw %} steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 - {% raw %}if: ${{ matrix.python-version != '3.6' }}{% endraw %} with: python-version: {% raw %}${{ matrix.python-version }}{% endraw %} allow-prereleases: true From ab7b76becf056166b9964d2cc0fb7d7fb1371f7e Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Fri, 28 Mar 2025 14:17:09 +0100 Subject: [PATCH 203/244] Fix db test suite --- .github/workflows/test-integrations-dbs.yml | 170 +++++++++++++++++++- 1 file changed, 169 insertions(+), 1 deletion(-) diff --git a/.github/workflows/test-integrations-dbs.yml b/.github/workflows/test-integrations-dbs.yml index 80f38f142f..ecb5bfdff8 100644 --- a/.github/workflows/test-integrations-dbs.yml +++ b/.github/workflows/test-integrations-dbs.yml @@ -31,7 +31,175 @@ jobs: matrix: python-version: ["3.7","3.8","3.11","3.12","3.13"] os: [ubuntu-22.04] - container: ${{ (matrix.python-version == '3.6' || matrix.python-version == '3.7') && 'python:${{ matrix.python-version }}' || null }} + services: + postgres: + image: postgres + env: + POSTGRES_PASSWORD: sentry + # Set health checks to wait until postgres has started + options: >- + --health-cmd pg_isready + --health-interval 10s + --health-timeout 5s + --health-retries 5 + # Maps tcp port 5432 on service container to the host + ports: + - 5432:5432 + env: + SENTRY_PYTHON_TEST_POSTGRES_HOST: localhost + SENTRY_PYTHON_TEST_POSTGRES_USER: postgres + SENTRY_PYTHON_TEST_POSTGRES_PASSWORD: sentry + steps: + - uses: actions/checkout@v4.2.2 + - uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + allow-prereleases: true + - name: "Setup ClickHouse Server" + uses: getsentry/action-clickhouse-in-ci@v1.6 + - name: Setup Test Env + run: | + pip install "coverage[toml]" tox + - name: Erase coverage + run: | + coverage erase + - name: Test asyncpg latest + run: | + set -x # print commands that are executed + ./scripts/runtox.sh "py${{ matrix.python-version }}-asyncpg-latest" + - name: Test clickhouse_driver latest + run: | + set -x # print commands that are executed + ./scripts/runtox.sh "py${{ matrix.python-version }}-clickhouse_driver-latest" + - name: Test pymongo latest + run: | + set -x # print commands that are executed + ./scripts/runtox.sh "py${{ matrix.python-version }}-pymongo-latest" + - name: Test redis latest + run: | + set -x # print commands that are executed + ./scripts/runtox.sh "py${{ 
matrix.python-version }}-redis-latest" + - name: Test redis_py_cluster_legacy latest + run: | + set -x # print commands that are executed + ./scripts/runtox.sh "py${{ matrix.python-version }}-redis_py_cluster_legacy-latest" + - name: Test sqlalchemy latest + run: | + set -x # print commands that are executed + ./scripts/runtox.sh "py${{ matrix.python-version }}-sqlalchemy-latest" + - name: Generate coverage XML + if: ${{ !cancelled() }} + run: | + coverage combine .coverage-sentry-* + coverage xml + - name: Upload coverage to Codecov + if: ${{ !cancelled() }} + uses: codecov/codecov-action@v5.4.0 + with: + token: ${{ secrets.CODECOV_TOKEN }} + files: coverage.xml + # make sure no plugins alter our coverage reports + plugin: noop + verbose: true + - name: Upload test results to Codecov + if: ${{ !cancelled() }} + uses: codecov/test-results-action@v1 + with: + token: ${{ secrets.CODECOV_TOKEN }} + files: .junitxml + verbose: true + test-dbs-pinned: + name: DBs (pinned) + timeout-minutes: 30 + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + python-version: ["3.7","3.8","3.9","3.10","3.11","3.12","3.13"] + os: [ubuntu-22.04] + services: + postgres: + image: postgres + env: + POSTGRES_PASSWORD: sentry + # Set health checks to wait until postgres has started + options: >- + --health-cmd pg_isready + --health-interval 10s + --health-timeout 5s + --health-retries 5 + # Maps tcp port 5432 on service container to the host + ports: + - 5432:5432 + env: + SENTRY_PYTHON_TEST_POSTGRES_HOST: localhost + SENTRY_PYTHON_TEST_POSTGRES_USER: postgres + SENTRY_PYTHON_TEST_POSTGRES_PASSWORD: sentry + steps: + - uses: actions/checkout@v4.2.2 + - uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + allow-prereleases: true + - name: "Setup ClickHouse Server" + uses: getsentry/action-clickhouse-in-ci@v1.6 + - name: Setup Test Env + run: | + pip install "coverage[toml]" tox + - name: Erase coverage + run: | + coverage erase + - name: Test asyncpg pinned + run: | + set -x # print commands that are executed + ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-asyncpg" + - name: Test clickhouse_driver pinned + run: | + set -x # print commands that are executed + ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-clickhouse_driver" + - name: Test pymongo pinned + run: | + set -x # print commands that are executed + ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-pymongo" + - name: Test redis pinned + run: | + set -x # print commands that are executed + ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-redis" + - name: Test redis_py_cluster_legacy pinned + run: | + set -x # print commands that are executed + ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-redis_py_cluster_legacy" + - name: Test sqlalchemy pinned + run: | + set -x # print commands that are executed + ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-sqlalchemy" + - name: Generate coverage XML + if: ${{ !cancelled() }} + run: | + coverage combine .coverage-sentry-* + coverage xml + - name: Upload coverage to Codecov + if: ${{ !cancelled() }} + uses: codecov/codecov-action@v5.4.0 + with: + token: ${{ secrets.CODECOV_TOKEN }} + files: coverage.xml + # make sure no plugins alter our coverage reports + plugin: noop + verbose: true + - name: Upload test results to Codecov + if: ${{ !cancelled() }} + uses: codecov/test-results-action@v1 + with: + token: ${{ secrets.CODECOV_TOKEN }} + files: 
.junitxml + verbose: true + check_required_tests: + name: All pinned DBs tests passed + needs: test-dbs-pinned + # Always run this, even if a dependent job failed + if: always() + runs-on: ubuntu-22.04 steps: - name: Check for failures if: contains(needs.test-dbs-pinned.result, 'failure') || contains(needs.test-dbs-pinned.result, 'skipped') From d5a09bcd18d1dde0a5e6cfeeabb6779174bbd4d4 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Fri, 28 Mar 2025 14:43:44 +0100 Subject: [PATCH 204/244] Update Celery tests (#4211) Fixing Celery tests in Potel --- scripts/populate_tox/config.py | 2 +- tests/integrations/celery/test_celery.py | 12 ++++++++---- .../celery/test_update_celery_task_headers.py | 18 ++++++++++-------- tox.ini | 7 +++---- 4 files changed, 22 insertions(+), 17 deletions(-) diff --git a/scripts/populate_tox/config.py b/scripts/populate_tox/config.py index bad4f662b4..438d127a05 100644 --- a/scripts/populate_tox/config.py +++ b/scripts/populate_tox/config.py @@ -23,8 +23,8 @@ "package": "celery", "deps": { "*": ["newrelic", "redis"], - "py3.7": ["importlib-metadata<5.0"], }, + "python": ">=3.8", }, "clickhouse_driver": { "package": "clickhouse-driver", diff --git a/tests/integrations/celery/test_celery.py b/tests/integrations/celery/test_celery.py index 241c79dc9d..821a3bd10e 100644 --- a/tests/integrations/celery/test_celery.py +++ b/tests/integrations/celery/test_celery.py @@ -228,7 +228,14 @@ def dummy_task(x, y): ) assert submission_event["spans"] == [ { - "data": ApproxDict(), + "data": { + "sentry.name": "dummy_task", + "sentry.op": "queue.submit.celery", + "sentry.origin": "auto.queue.celery", + "sentry.source": "custom", + "thread.id": mock.ANY, + "thread.name": mock.ANY, + }, "description": "dummy_task", "op": "queue.submit.celery", "origin": "auto.queue.celery", @@ -238,9 +245,6 @@ def dummy_task(x, y): "timestamp": submission_event["spans"][0]["timestamp"], "trace_id": str(root_span.trace_id), "status": "ok", - "tags": { - "status": "ok", - }, } ] diff --git a/tests/integrations/celery/test_update_celery_task_headers.py b/tests/integrations/celery/test_update_celery_task_headers.py index 3eb9bd7e88..5b76bee076 100644 --- a/tests/integrations/celery/test_update_celery_task_headers.py +++ b/tests/integrations/celery/test_update_celery_task_headers.py @@ -7,6 +7,7 @@ from sentry_sdk.integrations.celery import _update_celery_task_headers import sentry_sdk from sentry_sdk.tracing_utils import Baggage +from tests.conftest import SortedBaggage BAGGAGE_VALUE = ( @@ -83,10 +84,11 @@ def test_span_with_transaction(sentry_init): assert outgoing_headers["sentry-trace"] == span.to_traceparent() assert outgoing_headers["headers"]["sentry-trace"] == span.to_traceparent() - assert outgoing_headers["baggage"] == transaction.get_baggage().serialize() - assert ( - outgoing_headers["headers"]["baggage"] - == transaction.get_baggage().serialize() + assert outgoing_headers["baggage"] == SortedBaggage( + transaction.get_baggage().serialize() + ) + assert outgoing_headers["headers"]["baggage"] == SortedBaggage( + transaction.get_baggage().serialize() ) @@ -117,11 +119,11 @@ def test_span_with_transaction_custom_headers(sentry_init): if x is not None and x != "" ] ) - assert outgoing_headers["baggage"] == combined_baggage.serialize( - include_third_party=True + assert outgoing_headers["baggage"] == SortedBaggage( + combined_baggage.serialize(include_third_party=True) ) - assert outgoing_headers["headers"]["baggage"] == combined_baggage.serialize( - include_third_party=True + assert 
outgoing_headers["headers"]["baggage"] == SortedBaggage( + combined_baggage.serialize(include_third_party=True) ) diff --git a/tox.ini b/tox.ini index 76521058e2..df02ee31ea 100644 --- a/tox.ini +++ b/tox.ini @@ -10,7 +10,7 @@ # The file (and all resulting CI YAMLs) then need to be regenerated via # "scripts/generate-test-files.sh". # -# Last generated: 2025-03-27T10:11:24.425788+00:00 +# Last generated: 2025-03-28T12:52:29.636426+00:00 [tox] requires = @@ -226,8 +226,8 @@ envlist = # ~~~ Tasks ~~~ - {py3.7,py3.8}-celery-v4.4.7 - {py3.7,py3.8}-celery-v5.0.5 + {py3.8}-celery-v4.4.7 + {py3.8}-celery-v5.0.5 {py3.8,py3.11,py3.12}-celery-v5.4.0 {py3.8,py3.12,py3.13}-celery-v5.5.0rc5 @@ -628,7 +628,6 @@ deps = celery-v5.5.0rc5: celery==5.5.0rc5 celery: newrelic celery: redis - py3.7-celery: importlib-metadata<5.0 dramatiq-v1.9.0: dramatiq==1.9.0 dramatiq-v1.12.3: dramatiq==1.12.3 From 9cf068bc23a1fcc6bca7694dea2fef21eb401b98 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Mon, 31 Mar 2025 10:54:28 +0200 Subject: [PATCH 205/244] Make continuous profiler work in POtel span_processor (#4098) Fixes #4063 Co-authored-by: Ivana Kellyer --- .../opentelemetry/span_processor.py | 18 +++++++++++++++++- tests/profiler/test_continuous_profiler.py | 6 +++--- 2 files changed, 20 insertions(+), 4 deletions(-) diff --git a/sentry_sdk/integrations/opentelemetry/span_processor.py b/sentry_sdk/integrations/opentelemetry/span_processor.py index c7b3fa30ab..d82d6a03e9 100644 --- a/sentry_sdk/integrations/opentelemetry/span_processor.py +++ b/sentry_sdk/integrations/opentelemetry/span_processor.py @@ -18,6 +18,7 @@ from sentry_sdk.profiler.continuous_profiler import ( try_autostart_continuous_profiler, get_profiler_id, + try_profile_lifecycle_trace_start, ) from sentry_sdk.profiler.transaction_profiler import Profile from sentry_sdk.integrations.opentelemetry.sampler import create_sampling_context @@ -80,7 +81,8 @@ def on_end(self, span): is_root_span = not span.parent or span.parent.is_remote if is_root_span: - # if have a root span ending, we build a transaction and send it + # if have a root span ending, stop the profiler, build a transaction and send it + self._stop_profile(span) self._flush_root_span(span) else: self._append_child_span(span) @@ -113,6 +115,7 @@ def _add_root_span(self, span, parent_span): def _start_profile(self, span): # type: (Span) -> None try_autostart_continuous_profiler() + profiler_id = get_profiler_id() thread_id, thread_name = get_current_thread_meta() @@ -131,6 +134,7 @@ def _start_profile(self, span): # unix timestamp that is on span.start_time # setting it to 0 means the profiler will internally measure time on start profile = Profile(sampled, 0) + sampling_context = create_sampling_context( span.name, span.attributes, span.parent, span.context.trace_id ) @@ -138,6 +142,18 @@ def _start_profile(self, span): profile.__enter__() set_sentry_meta(span, "profile", profile) + continuous_profile = try_profile_lifecycle_trace_start() + profiler_id = get_profiler_id() + if profiler_id: + span.set_attribute(SPANDATA.PROFILER_ID, profiler_id) + set_sentry_meta(span, "continuous_profile", continuous_profile) + + def _stop_profile(self, span): + # type: (ReadableSpan) -> None + continuous_profiler = get_sentry_meta(span, "continuous_profile") + if continuous_profiler: + continuous_profiler.stop() + def _flush_root_span(self, span): # type: (ReadableSpan) -> None transaction_event = self._root_span_to_transaction_event(span) diff --git a/tests/profiler/test_continuous_profiler.py 
b/tests/profiler/test_continuous_profiler.py index 860307e2e1..a3c3e54874 100644 --- a/tests/profiler/test_continuous_profiler.py +++ b/tests/profiler/test_continuous_profiler.py @@ -239,7 +239,7 @@ def test_continuous_profiler_auto_start_and_manual_stop( with sentry_sdk.start_span(name="profiling"): with sentry_sdk.start_span(op="op"): - time.sleep(0.05) + time.sleep(0.1) assert_single_transaction_with_profile_chunks(envelopes, thread) @@ -250,7 +250,7 @@ def test_continuous_profiler_auto_start_and_manual_stop( with sentry_sdk.start_span(name="profiling"): with sentry_sdk.start_span(op="op"): - time.sleep(0.05) + time.sleep(0.1) assert_single_transaction_without_profile_chunks(envelopes) @@ -260,7 +260,7 @@ def test_continuous_profiler_auto_start_and_manual_stop( with sentry_sdk.start_span(name="profiling"): with sentry_sdk.start_span(op="op"): - time.sleep(0.05) + time.sleep(0.1) assert_single_transaction_with_profile_chunks(envelopes, thread) From d32aaf0d749ad28ec1f0030e9d8b62dd97a22435 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Mon, 31 Mar 2025 15:19:13 +0200 Subject: [PATCH 206/244] fixed merge issue --- tox.ini | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tox.ini b/tox.ini index 88dc67bfea..7465d25367 100644 --- a/tox.ini +++ b/tox.ini @@ -10,7 +10,7 @@ # The file (and all resulting CI YAMLs) then need to be regenerated via # "scripts/generate-test-files.sh". # -# Last generated: 2025-03-28T12:52:29.636426+00:00 +# Last generated: 2025-03-31T13:18:36.169759+00:00 [tox] requires = @@ -515,9 +515,9 @@ deps = # RQ (Redis Queue) # https://github.com/jamesls/fakeredis/issues/245 rq-v{0.13,1.0,1.5,1.10}: fakeredis>=1.0,<1.7.4 - rq-v{1.15,1.16}: fakeredis<2.28.0 + rq-v{1.15,1.16}: fakeredis<2.28.0 py3.7-rq-v{1.15,1.16}: fakeredis!=2.26.0 # https://github.com/cunla/fakeredis-py/issues/341 - rq-latest: fakeredis<2.28.0 + rq-latest: fakeredis<2.28.0 py3.7-rq-latest: fakeredis!=2.26.0 # https://github.com/cunla/fakeredis-py/issues/341 rq-v0.6: rq~=0.6.0 rq-v0.13: rq~=0.13.0 From bff8fddc85857a67e932ccbdd1e44edc98270cea Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Tue, 1 Apr 2025 15:10:35 +0200 Subject: [PATCH 207/244] Fixed some tests (#4217) - function transports have been removed. removing the related test - `profiles_sample_rate` is not experimental anymore - `continuous_profiling_mode` has been removed so only the top level `profiler_mode` exists. 
(so the if is not necessary anymore) --- tests/integrations/wsgi/test_wsgi.py | 2 +- tests/profiler/test_continuous_profiler.py | 40 ++++++---------------- tests/test_client.py | 7 ---- 3 files changed, 12 insertions(+), 37 deletions(-) diff --git a/tests/integrations/wsgi/test_wsgi.py b/tests/integrations/wsgi/test_wsgi.py index 487ccbfd69..76c80f6c6a 100644 --- a/tests/integrations/wsgi/test_wsgi.py +++ b/tests/integrations/wsgi/test_wsgi.py @@ -437,7 +437,7 @@ def test_app(environ, start_response): sentry_init( traces_sample_rate=1.0, - _experiments={"profiles_sample_rate": 1.0}, + profiles_sample_rate=1.0, ) app = SentryWsgiMiddleware(test_app) envelopes = capture_envelopes() diff --git a/tests/profiler/test_continuous_profiler.py b/tests/profiler/test_continuous_profiler.py index a3c3e54874..27994648f8 100644 --- a/tests/profiler/test_continuous_profiler.py +++ b/tests/profiler/test_continuous_profiler.py @@ -24,25 +24,16 @@ requires_gevent = pytest.mark.skipif(gevent is None, reason="gevent not enabled") -def get_client_options(use_top_level_profiler_mode): +def get_client_options(): def client_options( mode=None, auto_start=None, profile_session_sample_rate=1.0, lifecycle="manual" ): - if use_top_level_profiler_mode: - return { - "profile_lifecycle": lifecycle, - "profiler_mode": mode, - "profile_session_sample_rate": profile_session_sample_rate, - "_experiments": { - "continuous_profiling_auto_start": auto_start, - }, - } return { "profile_lifecycle": lifecycle, + "profiler_mode": mode, "profile_session_sample_rate": profile_session_sample_rate, "_experiments": { "continuous_profiling_auto_start": auto_start, - "continuous_profiling_mode": mode, }, } @@ -60,8 +51,7 @@ def client_options( @pytest.mark.parametrize( "make_options", [ - pytest.param(get_client_options(True), id="non-experiment"), - pytest.param(get_client_options(False), id="experiment"), + pytest.param(get_client_options()), ], ) def test_continuous_profiler_invalid_mode(mode, make_options, teardown_profiling): @@ -83,8 +73,7 @@ def test_continuous_profiler_invalid_mode(mode, make_options, teardown_profiling @pytest.mark.parametrize( "make_options", [ - pytest.param(get_client_options(True), id="non-experiment"), - pytest.param(get_client_options(False), id="experiment"), + pytest.param(get_client_options()), ], ) def test_continuous_profiler_valid_mode(mode, make_options, teardown_profiling): @@ -106,8 +95,7 @@ def test_continuous_profiler_valid_mode(mode, make_options, teardown_profiling): @pytest.mark.parametrize( "make_options", [ - pytest.param(get_client_options(True), id="non-experiment"), - pytest.param(get_client_options(False), id="experiment"), + pytest.param(get_client_options()), ], ) def test_continuous_profiler_setup_twice(mode, make_options, teardown_profiling): @@ -215,8 +203,7 @@ def assert_single_transaction_without_profile_chunks(envelopes): @pytest.mark.parametrize( "make_options", [ - pytest.param(get_client_options(True), id="non-experiment"), - pytest.param(get_client_options(False), id="experiment"), + pytest.param(get_client_options()), ], ) @mock.patch("sentry_sdk.profiler.continuous_profiler.PROFILE_BUFFER_SECONDS", 0.01) @@ -275,8 +262,7 @@ def test_continuous_profiler_auto_start_and_manual_stop( @pytest.mark.parametrize( "make_options", [ - pytest.param(get_client_options(True), id="non-experiment"), - pytest.param(get_client_options(False), id="experiment"), + pytest.param(get_client_options()), ], ) @mock.patch("sentry_sdk.profiler.continuous_profiler.PROFILE_BUFFER_SECONDS", 0.01) @@ 
-340,8 +326,7 @@ def test_continuous_profiler_manual_start_and_stop_sampled( @pytest.mark.parametrize( "make_options", [ - pytest.param(get_client_options(True), id="non-experiment"), - pytest.param(get_client_options(False), id="experiment"), + pytest.param(get_client_options()), ], ) def test_continuous_profiler_manual_start_and_stop_unsampled( @@ -382,8 +367,7 @@ def test_continuous_profiler_manual_start_and_stop_unsampled( @pytest.mark.parametrize( "make_options", [ - pytest.param(get_client_options(True), id="non-experiment"), - pytest.param(get_client_options(False), id="experiment"), + pytest.param(get_client_options()), ], ) @mock.patch("sentry_sdk.profiler.continuous_profiler.DEFAULT_SAMPLING_FREQUENCY", 21) @@ -444,8 +428,7 @@ def test_continuous_profiler_auto_start_and_stop_sampled( @pytest.mark.parametrize( "make_options", [ - pytest.param(get_client_options(True), id="non-experiment"), - pytest.param(get_client_options(False), id="experiment"), + pytest.param(get_client_options()), ], ) @mock.patch("sentry_sdk.profiler.continuous_profiler.PROFILE_BUFFER_SECONDS", 0.01) @@ -493,8 +476,7 @@ def test_continuous_profiler_auto_start_and_stop_unsampled( @pytest.mark.parametrize( "make_options", [ - pytest.param(get_client_options(True), id="non-experiment"), - pytest.param(get_client_options(False), id="experiment"), + pytest.param(get_client_options()), ], ) def test_continuous_profiler_manual_start_and_stop_noop_when_using_trace_lifecyle( diff --git a/tests/test_client.py b/tests/test_client.py index fb547e495a..9b0b4c3bdb 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -378,13 +378,6 @@ def test_socks_proxy(testcase, http2): ) -def test_simple_transport(sentry_init): - events = [] - sentry_init(transport=events.append) - capture_message("Hello World!") - assert events[0]["message"] == "Hello World!" - - def test_ignore_errors(sentry_init, capture_events): sentry_init(ignore_errors=[ZeroDivisionError]) events = capture_events() From 43133b390b72d4f6efdda131be5cf6551780672f Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Tue, 8 Apr 2025 15:20:18 +0200 Subject: [PATCH 208/244] chore: Drop even more deprecated stuff (#4193) Drop: - setting `Scope.transaction` directly - old way of setting `failed_request_status_codes` - setting the `span` argument of `Scope.trace_propagation_meta` - setting `Scope.user` directly --- MIGRATION_GUIDE.md | 4 + sentry_sdk/integrations/_wsgi_common.py | 38 +--------- sentry_sdk/integrations/django/__init__.py | 6 +- sentry_sdk/integrations/starlette.py | 29 ++----- sentry_sdk/scope.py | 46 ----------- tests/integrations/fastapi/test_fastapi.py | 43 ----------- .../integrations/starlette/test_starlette.py | 76 ------------------- 7 files changed, 15 insertions(+), 227 deletions(-) diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index cc35f9134b..23604f99c4 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -157,6 +157,10 @@ Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of wh - `profiles_sample_rate` and `profiler_mode` were removed from options available via `_experiments`. Use the top-level `profiles_sample_rate` and `profiler_mode` options instead. - `Transport.capture_event` has been removed. Use `Transport.capture_envelope` instead. - Function transports are no longer supported. Subclass the `Transport` instead. +- Setting `Scope.transaction` directly is no longer supported. Use `Scope.set_transaction_name()` instead. 
+- Passing a list or `None` for `failed_request_status_codes` in the Starlette integration is no longer supported. Pass a set of integers instead. +- The `span` argument of `Scope.trace_propagation_meta` is no longer supported. +- Setting `Scope.user` directly is no longer supported. Use `Scope.set_user()` instead. - `start_transaction` (`start_span`) no longer takes a `baggage` argument. Use the `continue_trace()` context manager instead to propagate baggage. ### Deprecated diff --git a/sentry_sdk/integrations/_wsgi_common.py b/sentry_sdk/integrations/_wsgi_common.py index 65801476d5..2d4a5f7b73 100644 --- a/sentry_sdk/integrations/_wsgi_common.py +++ b/sentry_sdk/integrations/_wsgi_common.py @@ -3,7 +3,7 @@ import sentry_sdk from sentry_sdk.scope import should_send_default_pii -from sentry_sdk.utils import AnnotatedValue, logger, SENSITIVE_DATA_SUBSTITUTE +from sentry_sdk.utils import AnnotatedValue, SENSITIVE_DATA_SUBSTITUTE try: from django.http.request import RawPostDataException @@ -19,7 +19,7 @@ from typing import MutableMapping from typing import Optional from typing import Union - from sentry_sdk._types import Event, HttpStatusCodeRange + from sentry_sdk._types import Event SENSITIVE_ENV_KEYS = ( @@ -240,37 +240,3 @@ def _request_headers_to_span_attributes(headers): attributes[f"http.request.header.{header.lower()}"] = value return attributes - - -def _in_http_status_code_range(code, code_ranges): - # type: (object, list[HttpStatusCodeRange]) -> bool - for target in code_ranges: - if isinstance(target, int): - if code == target: - return True - continue - - try: - if code in target: - return True - except TypeError: - logger.warning( - "failed_request_status_codes has to be a list of integers or containers" - ) - - return False - - -class HttpCodeRangeContainer: - """ - Wrapper to make it possible to use list[HttpStatusCodeRange] as a Container[int]. - Used for backwards compatibility with the old `failed_request_status_codes` option. 
- """ - - def __init__(self, code_ranges): - # type: (list[HttpStatusCodeRange]) -> None - self._code_ranges = code_ranges - - def __contains__(self, item): - # type: (object) -> bool - return _in_http_status_code_range(item, self._code_ranges) diff --git a/sentry_sdk/integrations/django/__init__.py b/sentry_sdk/integrations/django/__init__.py index 99cb7704fa..5dd45e5b18 100644 --- a/sentry_sdk/integrations/django/__init__.py +++ b/sentry_sdk/integrations/django/__init__.py @@ -415,11 +415,13 @@ def _set_transaction_name_and_source(scope, transaction_style, request): if hasattr(urlconf, "handler404"): handler = urlconf.handler404 if isinstance(handler, str): - scope.transaction = handler + scope.set_transaction_name(handler) else: - scope.transaction = transaction_from_function( + name = transaction_from_function( getattr(handler, "view_class", handler) ) + if isinstance(name, str): + scope.set_transaction_name(name) except Exception: pass diff --git a/sentry_sdk/integrations/starlette.py b/sentry_sdk/integrations/starlette.py index 3b079d58d1..e75889ea38 100644 --- a/sentry_sdk/integrations/starlette.py +++ b/sentry_sdk/integrations/starlette.py @@ -1,6 +1,5 @@ import asyncio import functools -import warnings from collections.abc import Set from copy import deepcopy from json import JSONDecodeError @@ -14,7 +13,6 @@ ) from sentry_sdk.integrations._wsgi_common import ( DEFAULT_HTTP_METHODS_TO_CAPTURE, - HttpCodeRangeContainer, _is_json_content_type, request_body_within_bounds, ) @@ -37,9 +35,9 @@ from typing import TYPE_CHECKING if TYPE_CHECKING: - from typing import Any, Awaitable, Callable, Container, Dict, Optional, Tuple, Union + from typing import Any, Awaitable, Callable, Dict, Optional, Tuple - from sentry_sdk._types import Event, HttpStatusCodeRange + from sentry_sdk._types import Event try: import starlette # type: ignore @@ -89,7 +87,7 @@ class StarletteIntegration(Integration): def __init__( self, transaction_style="url", # type: str - failed_request_status_codes=_DEFAULT_FAILED_REQUEST_STATUS_CODES, # type: Union[Set[int], list[HttpStatusCodeRange], None] + failed_request_status_codes=_DEFAULT_FAILED_REQUEST_STATUS_CODES, # type: Set[int] middleware_spans=True, # type: bool http_methods_to_capture=DEFAULT_HTTP_METHODS_TO_CAPTURE, # type: tuple[str, ...] ): @@ -103,24 +101,7 @@ def __init__( self.middleware_spans = middleware_spans self.http_methods_to_capture = tuple(map(str.upper, http_methods_to_capture)) - if isinstance(failed_request_status_codes, Set): - self.failed_request_status_codes = ( - failed_request_status_codes - ) # type: Container[int] - else: - warnings.warn( - "Passing a list or None for failed_request_status_codes is deprecated. 
" - "Please pass a set of int instead.", - DeprecationWarning, - stacklevel=2, - ) - - if failed_request_status_codes is None: - self.failed_request_status_codes = _DEFAULT_FAILED_REQUEST_STATUS_CODES - else: - self.failed_request_status_codes = HttpCodeRangeContainer( - failed_request_status_codes - ) + self.failed_request_status_codes = failed_request_status_codes @staticmethod def setup_once(): @@ -332,7 +313,7 @@ def _add_user_to_sentry_scope(scope): user_info.setdefault("email", starlette_user.email) sentry_scope = sentry_sdk.get_isolation_scope() - sentry_scope.user = user_info + sentry_scope.set_user(user_info) def patch_authentication_middleware(middleware_class): diff --git a/sentry_sdk/scope.py b/sentry_sdk/scope.py index 2d8d0d54f3..ff395dc1b2 100644 --- a/sentry_sdk/scope.py +++ b/sentry_sdk/scope.py @@ -565,14 +565,6 @@ def trace_propagation_meta(self, *args, **kwargs): Return meta tags which should be injected into HTML templates to allow propagation of trace information. """ - span = kwargs.pop("span", None) - if span is not None: - warnings.warn( - "The parameter `span` in trace_propagation_meta() is deprecated and will be removed in the future.", - DeprecationWarning, - stacklevel=2, - ) - meta = "" sentry_trace = self.get_traceparent() @@ -716,33 +708,6 @@ def transaction(self): # transaction) or a non-orphan span on the scope return self._span.containing_transaction - @transaction.setter - def transaction(self, value): - # type: (Any) -> None - # would be type: (Optional[str]) -> None, see https://github.com/python/mypy/issues/3004 - """When set this forces a specific transaction name to be set. - - Deprecated: use set_transaction_name instead.""" - - # XXX: the docstring above is misleading. The implementation of - # apply_to_event prefers an existing value of event.transaction over - # anything set in the scope. - # XXX: note that with the introduction of the Scope.transaction getter, - # there is a semantic and type mismatch between getter and setter. The - # getter returns a Span, the setter sets a transaction name. - # Without breaking version compatibility, we could make the setter set a - # transaction name or transaction (self._span) depending on the type of - # the value argument. - - warnings.warn( - "Assigning to scope.transaction directly is deprecated: use scope.set_transaction_name() instead.", - DeprecationWarning, - stacklevel=2, - ) - self._transaction = value - if self._span and self._span.containing_transaction: - self._span.containing_transaction.name = value - def set_transaction_name(self, name, source=None): # type: (str, Optional[str]) -> None """Set the transaction name and optionally the transaction source.""" @@ -766,17 +731,6 @@ def transaction_source(self): # type: () -> Optional[str] return self._transaction_info.get("source") - @_attr_setter - def user(self, value): - # type: (Optional[Dict[str, Any]]) -> None - """When set a specific user is bound to the scope. 
Deprecated in favor of set_user.""" - warnings.warn( - "The `Scope.user` setter is deprecated in favor of `Scope.set_user()`.", - DeprecationWarning, - stacklevel=2, - ) - self.set_user(value) - def set_user(self, value): # type: (Optional[Dict[str, Any]]) -> None """Sets a user for the scope.""" diff --git a/tests/integrations/fastapi/test_fastapi.py b/tests/integrations/fastapi/test_fastapi.py index 95838b1009..1c40abedcb 100644 --- a/tests/integrations/fastapi/test_fastapi.py +++ b/tests/integrations/fastapi/test_fastapi.py @@ -20,7 +20,6 @@ FASTAPI_VERSION = parse_version(fastapi.__version__) from tests.integrations.conftest import parametrize_test_configurable_status_codes -from tests.integrations.starlette import test_starlette def fastapi_app_factory(): @@ -528,48 +527,6 @@ def test_transaction_name_in_middleware( ) -@test_starlette.parametrize_test_configurable_status_codes_deprecated -def test_configurable_status_codes_deprecated( - sentry_init, - capture_events, - failed_request_status_codes, - status_code, - expected_error, -): - with pytest.warns(DeprecationWarning): - starlette_integration = StarletteIntegration( - failed_request_status_codes=failed_request_status_codes - ) - - with pytest.warns(DeprecationWarning): - fast_api_integration = FastApiIntegration( - failed_request_status_codes=failed_request_status_codes - ) - - sentry_init( - integrations=[ - starlette_integration, - fast_api_integration, - ] - ) - - events = capture_events() - - app = FastAPI() - - @app.get("/error") - async def _error(): - raise HTTPException(status_code) - - client = TestClient(app) - client.get("/error") - - if expected_error: - assert len(events) == 1 - else: - assert not events - - @pytest.mark.skipif( FASTAPI_VERSION < (0, 80), reason="Requires FastAPI >= 0.80, because earlier versions do not support HTTP 'HEAD' requests", diff --git a/tests/integrations/starlette/test_starlette.py b/tests/integrations/starlette/test_starlette.py index 33b998e15d..a3c289590b 100644 --- a/tests/integrations/starlette/test_starlette.py +++ b/tests/integrations/starlette/test_starlette.py @@ -1182,82 +1182,6 @@ def test_span_origin(sentry_init, capture_events): assert span["origin"] == "auto.http.starlette" -class NonIterableContainer: - """Wraps any container and makes it non-iterable. - - Used to test backwards compatibility with our old way of defining failed_request_status_codes, which allowed - passing in a list of (possibly non-iterable) containers. The Python standard library does not provide any built-in - non-iterable containers, so we have to define our own. 
- """ - - def __init__(self, inner): - self.inner = inner - - def __contains__(self, item): - return item in self.inner - - -parametrize_test_configurable_status_codes_deprecated = pytest.mark.parametrize( - "failed_request_status_codes,status_code,expected_error", - [ - (None, 500, True), - (None, 400, False), - ([500, 501], 500, True), - ([500, 501], 401, False), - ([range(400, 499)], 401, True), - ([range(400, 499)], 500, False), - ([range(400, 499), range(500, 599)], 300, False), - ([range(400, 499), range(500, 599)], 403, True), - ([range(400, 499), range(500, 599)], 503, True), - ([range(400, 403), 500, 501], 401, True), - ([range(400, 403), 500, 501], 405, False), - ([range(400, 403), 500, 501], 501, True), - ([range(400, 403), 500, 501], 503, False), - ([], 500, False), - ([NonIterableContainer(range(500, 600))], 500, True), - ([NonIterableContainer(range(500, 600))], 404, False), - ], -) -"""Test cases for configurable status codes (deprecated API). -Also used by the FastAPI tests. -""" - - -@parametrize_test_configurable_status_codes_deprecated -def test_configurable_status_codes_deprecated( - sentry_init, - capture_events, - failed_request_status_codes, - status_code, - expected_error, -): - with pytest.warns(DeprecationWarning): - starlette_integration = StarletteIntegration( - failed_request_status_codes=failed_request_status_codes - ) - - sentry_init(integrations=[starlette_integration]) - - events = capture_events() - - async def _error(request): - raise HTTPException(status_code) - - app = starlette.applications.Starlette( - routes=[ - starlette.routing.Route("/error", _error, methods=["GET"]), - ], - ) - - client = TestClient(app) - client.get("/error") - - if expected_error: - assert len(events) == 1 - else: - assert not events - - @pytest.mark.skipif( STARLETTE_VERSION < (0, 21), reason="Requires Starlette >= 0.21, because earlier versions do not support HTTP 'HEAD' requests", From 4f0651e6db500e517d085f49c33a2c70db4a957c Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Wed, 9 Apr 2025 09:46:46 +0200 Subject: [PATCH 209/244] Drop support for old frameworks (#4246) - trytond<5, falcon<3, django<2 are all more than 5 years old Closes https://github.com/getsentry/sentry-python/issues/4049 --- MIGRATION_GUIDE.md | 3 ++ scripts/populate_tox/config.py | 2 +- sentry_sdk/integrations/__init__.py | 5 ++- sentry_sdk/integrations/django/__init__.py | 38 +++---------------- sentry_sdk/integrations/django/templates.py | 11 +----- .../integrations/django/transactions.py | 7 +--- sentry_sdk/integrations/falcon.py | 30 +++------------ sentry_sdk/integrations/trytond.py | 5 ++- tox.ini | 22 +++++------ 9 files changed, 34 insertions(+), 89 deletions(-) diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index 23604f99c4..748de16657 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -162,6 +162,9 @@ Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of wh - The `span` argument of `Scope.trace_propagation_meta` is no longer supported. - Setting `Scope.user` directly is no longer supported. Use `Scope.set_user()` instead. - `start_transaction` (`start_span`) no longer takes a `baggage` argument. Use the `continue_trace()` context manager instead to propagate baggage. +- Dropped support for Django versions below 2.0. +- Dropped support for trytond versions below 5.0. +- Dropped support for Falcon versions below 3.0. 
### Deprecated diff --git a/scripts/populate_tox/config.py b/scripts/populate_tox/config.py index fbfbf0bbc3..b1c61b6a14 100644 --- a/scripts/populate_tox/config.py +++ b/scripts/populate_tox/config.py @@ -33,6 +33,7 @@ "package": "django", "deps": { "*": [ + "channels[daphne]", "psycopg2-binary", "djangorestframework", "pytest-django", @@ -45,7 +46,6 @@ "Werkzeug<2.1.0", ], "<3.1": ["pytest-django<4.0"], - ">=2.0": ["channels[daphne]"], }, }, "dramatiq": { diff --git a/sentry_sdk/integrations/__init__.py b/sentry_sdk/integrations/__init__.py index fdf93cd203..fcfcccebd0 100644 --- a/sentry_sdk/integrations/__init__.py +++ b/sentry_sdk/integrations/__init__.py @@ -132,9 +132,9 @@ def iter_default_integrations(with_auto_enabling_integrations): "celery": (4, 4, 7), "chalice": (1, 16, 0), "clickhouse_driver": (0, 2, 0), - "django": (1, 8), + "django": (2, 0), "dramatiq": (1, 9), - "falcon": (1, 4), + "falcon": (3, 0), "fastapi": (0, 79, 0), "flask": (1, 1, 4), "gql": (3, 4, 1), @@ -157,6 +157,7 @@ def iter_default_integrations(with_auto_enabling_integrations): "statsig": (0, 55, 3), "strawberry": (0, 209, 5), "tornado": (6, 0), + "trytond": (5, 0), "typer": (0, 15), "unleash": (6, 0, 1), } diff --git a/sentry_sdk/integrations/django/__init__.py b/sentry_sdk/integrations/django/__init__.py index 5dd45e5b18..27b53e52a8 100644 --- a/sentry_sdk/integrations/django/__init__.py +++ b/sentry_sdk/integrations/django/__init__.py @@ -56,6 +56,7 @@ except ImportError: raise DidNotEnable("Django not installed") +from sentry_sdk.integrations.django.caching import patch_caching from sentry_sdk.integrations.django.transactions import LEGACY_RESOLVER from sentry_sdk.integrations.django.templates import ( get_template_frame_from_exception, @@ -65,11 +66,6 @@ from sentry_sdk.integrations.django.signals_handlers import patch_signals from sentry_sdk.integrations.django.views import patch_views -if DJANGO_VERSION[:2] > (1, 8): - from sentry_sdk.integrations.django.caching import patch_caching -else: - patch_caching = None # type: ignore - from typing import TYPE_CHECKING if TYPE_CHECKING: @@ -90,19 +86,6 @@ from sentry_sdk._types import Event, Hint, EventProcessor, NotImplementedType -if DJANGO_VERSION < (1, 10): - - def is_authenticated(request_user): - # type: (Any) -> bool - return request_user.is_authenticated() - -else: - - def is_authenticated(request_user): - # type: (Any) -> bool - return request_user.is_authenticated - - TRANSACTION_STYLE_VALUES = ("function_name", "url") @@ -599,7 +582,7 @@ def _set_user_info(request, event): user = getattr(request, "user", None) - if user is None or not is_authenticated(user): + if user is None or not user.is_authenticated: return try: @@ -626,20 +609,11 @@ def install_sql_hook(): except ImportError: from django.db.backends.util import CursorWrapper - try: - # django 1.6 and 1.7 compatability - from django.db.backends import BaseDatabaseWrapper - except ImportError: - # django 1.8 or later - from django.db.backends.base.base import BaseDatabaseWrapper + from django.db.backends.base.base import BaseDatabaseWrapper - try: - real_execute = CursorWrapper.execute - real_executemany = CursorWrapper.executemany - real_connect = BaseDatabaseWrapper.connect - except AttributeError: - # This won't work on Django versions < 1.6 - return + real_execute = CursorWrapper.execute + real_executemany = CursorWrapper.executemany + real_connect = BaseDatabaseWrapper.connect @ensure_integration_enabled(DjangoIntegration, real_execute) def execute(self, sql, params=None): diff --git 
a/sentry_sdk/integrations/django/templates.py b/sentry_sdk/integrations/django/templates.py index c9e41e24a0..53ccc60fc6 100644 --- a/sentry_sdk/integrations/django/templates.py +++ b/sentry_sdk/integrations/django/templates.py @@ -1,8 +1,8 @@ import functools from django.template import TemplateSyntaxError +from django.template.base import Origin from django.utils.safestring import mark_safe -from django import VERSION as DJANGO_VERSION import sentry_sdk from sentry_sdk.consts import OP @@ -17,13 +17,6 @@ from typing import Iterator from typing import Tuple -try: - # support Django 1.9 - from django.template.base import Origin -except ImportError: - # backward compatibility - from django.template.loader import LoaderOrigin as Origin - def get_template_frame_from_exception(exc_value): # type: (Optional[BaseException]) -> Optional[Dict[str, Any]] @@ -81,8 +74,6 @@ def rendered_content(self): SimpleTemplateResponse.rendered_content = rendered_content - if DJANGO_VERSION < (1, 7): - return import django.shortcuts real_render = django.shortcuts.render diff --git a/sentry_sdk/integrations/django/transactions.py b/sentry_sdk/integrations/django/transactions.py index 5a7d69f3c9..78b972bc37 100644 --- a/sentry_sdk/integrations/django/transactions.py +++ b/sentry_sdk/integrations/django/transactions.py @@ -19,12 +19,7 @@ from typing import Union from re import Pattern -from django import VERSION as DJANGO_VERSION - -if DJANGO_VERSION >= (2, 0): - from django.urls.resolvers import RoutePattern -else: - RoutePattern = None +from django.urls.resolvers import RoutePattern try: from django.urls import get_resolver diff --git a/sentry_sdk/integrations/falcon.py b/sentry_sdk/integrations/falcon.py index ddedcb10de..0b5c9c4fe7 100644 --- a/sentry_sdk/integrations/falcon.py +++ b/sentry_sdk/integrations/falcon.py @@ -19,8 +19,6 @@ from sentry_sdk._types import Event, EventProcessor -# In Falcon 3.0 `falcon.api_helpers` is renamed to `falcon.app_helpers` -# and `falcon.API` to `falcon.App` try: import falcon # type: ignore @@ -29,24 +27,15 @@ except ImportError: raise DidNotEnable("Falcon not installed") -try: - import falcon.app_helpers # type: ignore - - falcon_helpers = falcon.app_helpers - falcon_app_class = falcon.App - FALCON3 = True -except ImportError: - import falcon.api_helpers # type: ignore +import falcon.app_helpers # type: ignore - falcon_helpers = falcon.api_helpers - falcon_app_class = falcon.API - FALCON3 = False +falcon_helpers = falcon.app_helpers +falcon_app_class = falcon.App _FALCON_UNSET = None # type: Optional[object] -if FALCON3: # falcon.request._UNSET is only available in Falcon 3.0+ - with capture_internal_exceptions(): - from falcon.request import _UNSET as _FALCON_UNSET # type: ignore[import-not-found, no-redef] +with capture_internal_exceptions(): + from falcon.request import _UNSET as _FALCON_UNSET # type: ignore[import-not-found, no-redef] class FalconRequestExtractor(RequestExtractor): @@ -232,14 +221,7 @@ def _exception_leads_to_http_5xx(ex, response): ex, (falcon.HTTPError, falcon.http_status.HTTPStatus) ) - # We only check the HTTP status on Falcon 3 because in Falcon 2, the status on the response - # at the stage where we capture it is listed as 200, even though we would expect to see a 500 - # status. Since at the time of this change, Falcon 2 is ca. 4 years old, we have decided to - # only perform this check on Falcon 3+, despite the risk that some handled errors might be - # reported to Sentry as unhandled on Falcon 2. 
- return (is_server_error or is_unhandled_error) and ( - not FALCON3 or _has_http_5xx_status(response) - ) + return (is_server_error or is_unhandled_error) and _has_http_5xx_status(response) def _has_http_5xx_status(response): diff --git a/sentry_sdk/integrations/trytond.py b/sentry_sdk/integrations/trytond.py index 2c44c593a4..fd2c6f389f 100644 --- a/sentry_sdk/integrations/trytond.py +++ b/sentry_sdk/integrations/trytond.py @@ -1,8 +1,9 @@ import sentry_sdk -from sentry_sdk.integrations import Integration +from sentry_sdk.integrations import _check_minimum_version, Integration from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware from sentry_sdk.utils import ensure_integration_enabled, event_from_exception +from trytond import __version__ as trytond_version # type: ignore from trytond.exceptions import TrytonException # type: ignore from trytond.wsgi import app # type: ignore @@ -19,6 +20,8 @@ def __init__(self): # type: () -> None @staticmethod def setup_once(): # type: () -> None + _check_minimum_version(TrytondWSGIIntegration, trytond_version) + app.wsgi_app = SentryWsgiMiddleware( app.wsgi_app, span_origin=TrytondWSGIIntegration.origin, diff --git a/tox.ini b/tox.ini index 1ddaa3b69c..f7137389ac 100644 --- a/tox.ini +++ b/tox.ini @@ -10,7 +10,7 @@ # The file (and all resulting CI YAMLs) then need to be regenerated via # "scripts/generate-test-files.sh". # -# Last generated: 2025-04-04T12:20:40.475012+00:00 +# Last generated: 2025-04-04T13:09:23.106982+00:00 [tox] requires = @@ -227,7 +227,7 @@ envlist = # ~~~ Web 1 ~~~ - {py3.7}-django-v1.11.29 + {py3.7}-django-v2.0.9 {py3.7,py3.8,py3.9}-django-v2.2.28 {py3.7,py3.9,py3.10}-django-v3.2.25 {py3.8,py3.11,py3.12}-django-v4.2.20 @@ -249,7 +249,7 @@ envlist = {py3.7}-bottle-v0.12.25 {py3.7,py3.8,py3.9}-bottle-v0.13.2 - {py3.7}-falcon-v2.0.0 + {py3.7,py3.8,py3.9}-falcon-v3.0.1 {py3.7,py3.11,py3.12}-falcon-v3.1.3 {py3.8,py3.11,py3.12}-falcon-v4.0.2 @@ -601,12 +601,13 @@ deps = # ~~~ Web 1 ~~~ - django-v1.11.29: django==1.11.29 + django-v2.0.9: django==2.0.9 django-v2.2.28: django==2.2.28 django-v3.2.25: django==3.2.25 django-v4.2.20: django==4.2.20 django-v5.0.9: django==5.0.9 django-v5.2: django==5.2 + django: channels[daphne] django: psycopg2-binary django: djangorestframework django: pytest-django @@ -616,19 +617,14 @@ deps = django-v5.0.9: pytest-asyncio django-v5.2: pytest-asyncio django-v2.2.28: six - django-v1.11.29: djangorestframework>=3.0,<4.0 - django-v1.11.29: Werkzeug<2.1.0 + django-v2.0.9: djangorestframework>=3.0,<4.0 + django-v2.0.9: Werkzeug<2.1.0 django-v2.2.28: djangorestframework>=3.0,<4.0 django-v2.2.28: Werkzeug<2.1.0 django-v3.2.25: djangorestframework>=3.0,<4.0 django-v3.2.25: Werkzeug<2.1.0 - django-v1.11.29: pytest-django<4.0 + django-v2.0.9: pytest-django<4.0 django-v2.2.28: pytest-django<4.0 - django-v2.2.28: channels[daphne] - django-v3.2.25: channels[daphne] - django-v4.2.20: channels[daphne] - django-v5.0.9: channels[daphne] - django-v5.2: channels[daphne] flask-v1.1.4: flask==1.1.4 flask-v2.3.3: flask==2.3.3 @@ -660,7 +656,7 @@ deps = bottle-v0.13.2: bottle==0.13.2 bottle: werkzeug<2.1.0 - falcon-v2.0.0: falcon==2.0.0 + falcon-v3.0.1: falcon==3.0.1 falcon-v3.1.3: falcon==3.1.3 falcon-v4.0.2: falcon==4.0.2 From 9977769096a615858dda40fb8b2d3252cd88b243 Mon Sep 17 00:00:00 2001 From: Daniel Szoke <7881302+szokeasaurusrex@users.noreply.github.com> Date: Wed, 9 Apr 2025 11:09:07 +0200 Subject: [PATCH 210/244] ref: Remove `SpanKwargs` and `TransactionKwargs` (#4254) --- sentry_sdk/api.py | 6 +- 
.../integrations/opentelemetry/scope.py | 5 +- sentry_sdk/scope.py | 6 +- sentry_sdk/tracing.py | 79 ------------------- 4 files changed, 4 insertions(+), 92 deletions(-) diff --git a/sentry_sdk/api.py b/sentry_sdk/api.py index 50c12dd636..ca4ad3846a 100644 --- a/sentry_sdk/api.py +++ b/sentry_sdk/api.py @@ -30,8 +30,6 @@ from typing import Union from typing import Generator - from typing_extensions import Unpack - from sentry_sdk.client import BaseClient from sentry_sdk._types import ( Event, @@ -42,7 +40,7 @@ MeasurementUnit, LogLevelStr, ) - from sentry_sdk.tracing import Span, TransactionKwargs + from sentry_sdk.tracing import Span T = TypeVar("T") F = TypeVar("F", bound=Callable[..., Any]) @@ -258,7 +256,7 @@ def start_span(**kwargs): def start_transaction( transaction=None, # type: Optional[Span] - **kwargs, # type: Unpack[TransactionKwargs] + **kwargs, # type: Any ): # type: (...) -> Span """ diff --git a/sentry_sdk/integrations/opentelemetry/scope.py b/sentry_sdk/integrations/opentelemetry/scope.py index 2cd734bcdd..53b9fd247c 100644 --- a/sentry_sdk/integrations/opentelemetry/scope.py +++ b/sentry_sdk/integrations/opentelemetry/scope.py @@ -34,9 +34,6 @@ if TYPE_CHECKING: from typing import Tuple, Optional, Generator, Dict, Any - from typing_extensions import Unpack - - from sentry_sdk.tracing import TransactionKwargs class PotelScope(Scope): @@ -136,7 +133,7 @@ def _incoming_otel_span_context(self): return span_context def start_transaction(self, **kwargs): - # type: (Unpack[TransactionKwargs]) -> Span + # type: (Any) -> Span """ .. deprecated:: 3.0.0 This function is deprecated and will be removed in a future release. diff --git a/sentry_sdk/scope.py b/sentry_sdk/scope.py index ff395dc1b2..e08e9f1ef8 100644 --- a/sentry_sdk/scope.py +++ b/sentry_sdk/scope.py @@ -56,8 +56,6 @@ from typing import Union from typing import Self - from typing_extensions import Unpack - from sentry_sdk._types import ( Breadcrumb, BreadcrumbHint, @@ -70,8 +68,6 @@ Type, ) - from sentry_sdk.tracing import TransactionKwargs - import sentry_sdk P = ParamSpec("P") @@ -910,7 +906,7 @@ def add_breadcrumb(self, crumb=None, hint=None, **kwargs): self._n_breadcrumbs_truncated += 1 def start_transaction(self, **kwargs): - # type: (Unpack[TransactionKwargs]) -> Union[NoOpSpan, Span] + # type: (Any) -> Union[NoOpSpan, Span] """ .. deprecated:: 3.0.0 This function is deprecated and will be removed in a future release. diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 010f2a3d2a..312aef14c1 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -38,8 +38,6 @@ from typing import Union from typing import TypeVar - from typing_extensions import TypedDict - P = ParamSpec("P") R = TypeVar("R") @@ -50,83 +48,6 @@ from sentry_sdk.tracing_utils import Baggage - class SpanKwargs(TypedDict, total=False): - trace_id: str - """ - The trace ID of the root span. If this new span is to be the root span, - omit this parameter, and a new trace ID will be generated. - """ - - span_id: str - """The span ID of this span. If omitted, a new span ID will be generated.""" - - parent_span_id: str - """The span ID of the parent span, if applicable.""" - - same_process_as_parent: bool - """Whether this span is in the same process as the parent span.""" - - sampled: bool - """ - Whether the span should be sampled. Overrides the default sampling decision - for this span when provided. - """ - - op: str - """ - The span's operation. 
A list of recommended values is available here: - https://develop.sentry.dev/sdk/performance/span-operations/ - """ - - description: str - """A description of what operation is being performed within the span. This argument is DEPRECATED. Please use the `name` parameter, instead.""" - - status: str - """The span's status. Possible values are listed at https://develop.sentry.dev/sdk/event-payloads/span/""" - - containing_transaction: Optional["Span"] - """The transaction that this span belongs to.""" - - start_timestamp: Optional[Union[datetime, float]] - """ - The timestamp when the span started. If omitted, the current time - will be used. - """ - - scope: "sentry_sdk.Scope" - """The scope to use for this span. If not provided, we use the current scope.""" - - origin: Optional[str] - """ - The origin of the span. - See https://develop.sentry.dev/sdk/performance/trace-origin/ - Default "manual". - """ - - name: str - """A string describing what operation is being performed within the span/transaction.""" - - class TransactionKwargs(SpanKwargs, total=False): - source: str - """ - A string describing the source of the transaction name. This will be used to determine the transaction's type. - See https://develop.sentry.dev/sdk/event-payloads/transaction/#transaction-annotations for more information. - Default "custom". - """ - - parent_sampled: bool - """Whether the parent transaction was sampled. If True this transaction will be kept, if False it will be discarded.""" - - baggage: "Baggage" - """The W3C baggage header value. (see https://www.w3.org/TR/baggage/)""" - - ProfileContext = TypedDict( - "ProfileContext", - { - "profiler_id": str, - }, - ) - BAGGAGE_HEADER_NAME = "baggage" SENTRY_TRACE_HEADER_NAME = "sentry-trace" From 9e4896590016b028ec42f8bd99a98f7d49c75d6d Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Wed, 9 Apr 2025 13:35:58 +0200 Subject: [PATCH 211/244] chore: Make `start_span` fail if unsupported args are provided (#4201) Make the `Span` constructor actually fail if it gets unsupported arguments, as opposed to silently ignoring them, so that folks get notified early. Deprecating some of these in https://github.com/getsentry/sentry-python/pull/4244 Closes https://github.com/getsentry/sentry-python/issues/4200 --- MIGRATION_GUIDE.md | 7 +++++-- sentry_sdk/tracing.py | 9 +++------ 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index 748de16657..68717c5c14 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -131,7 +131,6 @@ Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of wh ### Removed -- Spans no longer have a `description`. Use `name` instead. - Dropped support for Python 3.6. - The `enable_tracing` `init` option has been removed. Configure `traces_sample_rate` directly. - The `propagate_traces` `init` option has been removed. Use `trace_propagation_targets` instead. @@ -157,11 +156,15 @@ Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of wh - `profiles_sample_rate` and `profiler_mode` were removed from options available via `_experiments`. Use the top-level `profiles_sample_rate` and `profiler_mode` options instead. - `Transport.capture_event` has been removed. Use `Transport.capture_envelope` instead. - Function transports are no longer supported. Subclass the `Transport` instead. 
+- `start_transaction` (`start_span`) no longer takes the following arguments: + - `trace_id`, `baggage`: use `continue_trace` for propagation from headers or environment variables + - `same_process_as_parent` + - `span_id` + - `parent_span_id`: you can supply a `parent_span` instead - Setting `Scope.transaction` directly is no longer supported. Use `Scope.set_transaction_name()` instead. - Passing a list or `None` for `failed_request_status_codes` in the Starlette integration is no longer supported. Pass a set of integers instead. - The `span` argument of `Scope.trace_propagation_meta` is no longer supported. - Setting `Scope.user` directly is no longer supported. Use `Scope.set_user()` instead. -- `start_transaction` (`start_span`) no longer takes a `baggage` argument. Use the `continue_trace()` context manager instead to propagate baggage. - Dropped support for Django versions below 2.0. - Dropped support for trytond versions below 5.0. - Dropped support for Falcon versions below 3.0. diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 312aef14c1..5a06d704ee 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -239,14 +239,9 @@ def __init__( only_if_parent=False, # type: bool parent_span=None, # type: Optional[Span] otel_span=None, # type: Optional[OtelSpan] - **_, # type: dict[str, object] ): # type: (...) -> None """ - For backwards compatibility with old the old Span interface, this class - accepts arbitrary keyword arguments, in addition to the ones explicitly - listed in the signature. These additional arguments are ignored. - If otel_span is passed explicitly, just acts as a proxy. If only_if_parent is True, just return an INVALID_SPAN @@ -284,6 +279,8 @@ def __init__( attributes[SentrySpanAttribute.OP] = op if source is not None: attributes[SentrySpanAttribute.SOURCE] = source + if description is not None: + attributes[SentrySpanAttribute.DESCRIPTION] = description if sampled is not None: attributes[SentrySpanAttribute.CUSTOM_SAMPLED] = sampled @@ -543,7 +540,7 @@ def timestamp(self): def start_child(self, **kwargs): # type: (**Any) -> Span - return Span(sampled=self.sampled, parent_span=self, **kwargs) + return Span(parent_span=self, **kwargs) def iter_headers(self): # type: () -> Iterator[Tuple[str, str]] From e8f99b3459ff7f386e8b68993cb771fb24cf1cf6 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Wed, 9 Apr 2025 15:10:40 +0200 Subject: [PATCH 212/244] Enable tracing without performance by default (#4240) Changing the default of `traces_sample_rate` to `0`. This means incoming traces will be continued, but we will not start traces on our own. (It used to be set to: never start or continue traces by default) Refs #4102 --- MIGRATION_GUIDE.md | 1 + sentry_sdk/consts.py | 2 +- tests/integrations/aiohttp/test_aiohttp.py | 5 +- tests/integrations/asgi/test_asgi.py | 8 +- tests/integrations/django/asgi/test_asgi.py | 2 + tests/integrations/django/test_basic.py | 1 + .../opentelemetry/test_sampler.py | 28 +- tests/integrations/wsgi/test_wsgi.py | 8 +- tests/test_dsc.py | 4 +- tests/tracing/test_sample_rand_propagation.py | 4 +- tests/tracing/test_sampling.py | 5 +- tests/tracing/test_trace_propagation.py | 282 ++++++++++++++++++ 12 files changed, 327 insertions(+), 23 deletions(-) create mode 100644 tests/tracing/test_trace_propagation.py diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index 68717c5c14..b784cb2a1a 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -9,6 +9,7 @@ Looking to upgrade from Sentry SDK 2.x to 3.x? 
Here's a comprehensive list of wh ### Changed - The SDK now supports Python 3.7 and higher. +- The default of `traces_sample_rate` changed to `0`. Meaning: Incoming traces will be continued by default. For example, if your frontend sends a `sentry-trace/baggage` headers pair, your SDK will create Spans and send them to Sentry. (The default used to be `None` meaning by default no Spans where created, no matter what headers the frontend sent to your project.) See also: https://docs.sentry.io/platforms/python/configuration/options/#traces_sample_rate - `sentry_sdk.start_span` now only takes keyword arguments. - `sentry_sdk.start_transaction`/`sentry_sdk.start_span` no longer takes the following arguments: `span`, `parent_sampled`, `trace_id`, `span_id` or `parent_span_id`. - You can no longer change the sampled status of a span with `span.sampled = False` after starting it. diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index e9d7063105..2c164fba3a 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -511,7 +511,7 @@ def __init__( debug=None, # type: Optional[bool] attach_stacktrace=False, # type: bool ca_certs=None, # type: Optional[str] - traces_sample_rate=None, # type: Optional[float] + traces_sample_rate=0, # type: Optional[float] traces_sampler=None, # type: Optional[TracesSampler] profiles_sample_rate=None, # type: Optional[float] profiles_sampler=None, # type: Optional[TracesSampler] diff --git a/tests/integrations/aiohttp/test_aiohttp.py b/tests/integrations/aiohttp/test_aiohttp.py index 93560421c0..539216e0d6 100644 --- a/tests/integrations/aiohttp/test_aiohttp.py +++ b/tests/integrations/aiohttp/test_aiohttp.py @@ -448,7 +448,10 @@ async def hello(request): async def test_trace_from_headers_if_performance_disabled( sentry_init, aiohttp_client, capture_events ): - sentry_init(integrations=[AioHttpIntegration()]) + sentry_init( + integrations=[AioHttpIntegration()], + traces_sample_rate=None, # disable all performance monitoring + ) async def hello(request): capture_message("It's a good day to try dividing by 0") diff --git a/tests/integrations/asgi/test_asgi.py b/tests/integrations/asgi/test_asgi.py index d0ddef8611..6651642436 100644 --- a/tests/integrations/asgi/test_asgi.py +++ b/tests/integrations/asgi/test_asgi.py @@ -269,7 +269,9 @@ async def test_has_trace_if_performance_disabled( asgi3_app_with_error_and_msg, capture_events, ): - sentry_init() + sentry_init( + traces_sample_rate=None, # disable all performance monitoring + ) app = SentryAsgiMiddleware(asgi3_app_with_error_and_msg) with pytest.raises(ZeroDivisionError): @@ -325,7 +327,9 @@ async def test_trace_from_headers_if_performance_disabled( asgi3_app_with_error_and_msg, capture_events, ): - sentry_init() + sentry_init( + traces_sample_rate=None, # disable all performance monitoring + ) app = SentryAsgiMiddleware(asgi3_app_with_error_and_msg) trace_id = "582b43a4192642f0b136d5159a501701" diff --git a/tests/integrations/django/asgi/test_asgi.py b/tests/integrations/django/asgi/test_asgi.py index f083447ed2..e9df117309 100644 --- a/tests/integrations/django/asgi/test_asgi.py +++ b/tests/integrations/django/asgi/test_asgi.py @@ -321,6 +321,7 @@ async def test_has_trace_if_performance_enabled(sentry_init, capture_events): async def test_has_trace_if_performance_disabled(sentry_init, capture_events): sentry_init( integrations=[DjangoIntegration()], + traces_sample_rate=None, # disable all performance monitoring ) events = capture_events() @@ -386,6 +387,7 @@ async def 
test_trace_from_headers_if_performance_enabled(sentry_init, capture_ev async def test_trace_from_headers_if_performance_disabled(sentry_init, capture_events): sentry_init( integrations=[DjangoIntegration()], + traces_sample_rate=None, # disable all performance monitoring ) events = capture_events() diff --git a/tests/integrations/django/test_basic.py b/tests/integrations/django/test_basic.py index 5b75bbb6af..a37576315f 100644 --- a/tests/integrations/django/test_basic.py +++ b/tests/integrations/django/test_basic.py @@ -241,6 +241,7 @@ def test_trace_from_headers_if_performance_disabled( http_methods_to_capture=("HEAD",), ) ], + traces_sample_rate=None, # disable all performance monitoring ) events = capture_events() diff --git a/tests/integrations/opentelemetry/test_sampler.py b/tests/integrations/opentelemetry/test_sampler.py index 8cccab05be..7198f6b390 100644 --- a/tests/integrations/opentelemetry/test_sampler.py +++ b/tests/integrations/opentelemetry/test_sampler.py @@ -6,14 +6,16 @@ import sentry_sdk +USE_DEFAULT_TRACES_SAMPLE_RATE = -1 + tracer = trace.get_tracer(__name__) @pytest.mark.parametrize( "traces_sample_rate, expected_num_of_envelopes", [ - # special case for testing, do not pass any traces_sample_rate to init() (the default traces_sample_rate=None will be used) - (-1, 0), + # special case for testing, do not pass any traces_sample_rate to init() (the default traces_sample_rate=0 will be used) + (USE_DEFAULT_TRACES_SAMPLE_RATE, 0), # traces_sample_rate=None means do not create new traces, and also do not continue incoming traces. So, no envelopes at all. (None, 0), # traces_sample_rate=0 means do not create new traces (0% of the requests), but continue incoming traces. So envelopes will be created only if there is an incoming trace. @@ -29,7 +31,7 @@ def test_sampling_traces_sample_rate_0_or_100( expected_num_of_envelopes, ): kwargs = {} - if traces_sample_rate != -1: + if traces_sample_rate != USE_DEFAULT_TRACES_SAMPLE_RATE: kwargs["traces_sample_rate"] = traces_sample_rate sentry_init(**kwargs) @@ -176,8 +178,8 @@ def keep_only_a(sampling_context): @pytest.mark.parametrize( "traces_sample_rate, expected_num_of_envelopes", [ - # special case for testing, do not pass any traces_sample_rate to init() (the default traces_sample_rate=None will be used) - (-1, 0), + # special case for testing, do not pass any traces_sample_rate to init() (the default traces_sample_rate=0 will be used) + (USE_DEFAULT_TRACES_SAMPLE_RATE, 1), # traces_sample_rate=None means do not create new traces, and also do not continue incoming traces. So, no envelopes at all. (None, 0), # traces_sample_rate=0 means do not create new traces (0% of the requests), but continue incoming traces. So envelopes will be created only if there is an incoming trace. 
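The comments in the sampler test above spell out the new semantics: `traces_sample_rate=None` disables tracing entirely, `0` (the new default) starts no traces of its own but continues incoming ones, and `1` samples everything. A hedged sketch of what "continue an incoming trace" looks like under the new default; the DSN and the incoming header values are placeholders:

```python
import sentry_sdk

# With the 3.x default (traces_sample_rate=0) the SDK does not start
# traces on its own, but it continues a trace sent by an upstream
# service via sentry-trace/baggage headers.
sentry_sdk.init(dsn="https://examplePublicKey@o0.ingest.sentry.io/0")

incoming_headers = {
    "sentry-trace": "771a43a4192642f0b136d5159a501700-1234567890abcdef-1",
    "baggage": "sentry-trace_id=771a43a4192642f0b136d5159a501700,sentry-sample_rate=1.0",
}

with sentry_sdk.continue_trace(incoming_headers):
    with sentry_sdk.start_span(op="task", name="continued-from-upstream"):
        ...  # part of the upstream trace; the upstream sampling decision is honored
```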
@@ -193,7 +195,7 @@ def test_sampling_parent_sampled( capture_envelopes, ): kwargs = {} - if traces_sample_rate != -1: + if traces_sample_rate != USE_DEFAULT_TRACES_SAMPLE_RATE: kwargs["traces_sample_rate"] = traces_sample_rate sentry_init(**kwargs) @@ -227,9 +229,11 @@ def test_sampling_parent_sampled( @pytest.mark.parametrize( "traces_sample_rate, upstream_sampled, expected_num_of_envelopes", [ - # special case for testing, do not pass any traces_sample_rate to init() (the default traces_sample_rate=None will be used) - (-1, 0, 0), + # special case for testing, do not pass any traces_sample_rate to init() (the default traces_sample_rate=0 will be used) + (USE_DEFAULT_TRACES_SAMPLE_RATE, 0, 0), + (USE_DEFAULT_TRACES_SAMPLE_RATE, 1, 1), # traces_sample_rate=None means do not create new traces, and also do not continue incoming traces. So, no envelopes at all. + (None, 0, 0), (None, 1, 0), # traces_sample_rate=0 means do not create new traces (0% of the requests), but continue incoming traces. So envelopes will be created only if there is an incoming trace. (0, 0, 0), @@ -247,7 +251,7 @@ def test_sampling_parent_dropped( capture_envelopes, ): kwargs = {} - if traces_sample_rate != -1: + if traces_sample_rate != USE_DEFAULT_TRACES_SAMPLE_RATE: kwargs["traces_sample_rate"] = traces_sample_rate sentry_init(**kwargs) @@ -281,8 +285,8 @@ def test_sampling_parent_dropped( @pytest.mark.parametrize( "traces_sample_rate, expected_num_of_envelopes", [ - # special case for testing, do not pass any traces_sample_rate to init() (the default traces_sample_rate=None will be used) - (-1, 0), + # special case for testing, do not pass any traces_sample_rate to init() (the default traces_sample_rate=0 will be used) + (USE_DEFAULT_TRACES_SAMPLE_RATE, 0), # traces_sample_rate=None means do not create new traces, and also do not continue incoming traces. So, no envelopes at all. (None, 0), # traces_sample_rate=0 means do not create new traces (0% of the requests), but continue incoming traces. So envelopes will be created only if there is an incoming trace. @@ -298,7 +302,7 @@ def test_sampling_parent_deferred( capture_envelopes, ): kwargs = {} - if traces_sample_rate != -1: + if traces_sample_rate != USE_DEFAULT_TRACES_SAMPLE_RATE: kwargs["traces_sample_rate"] = traces_sample_rate sentry_init(**kwargs) diff --git a/tests/integrations/wsgi/test_wsgi.py b/tests/integrations/wsgi/test_wsgi.py index 76c80f6c6a..149dd1d7d4 100644 --- a/tests/integrations/wsgi/test_wsgi.py +++ b/tests/integrations/wsgi/test_wsgi.py @@ -238,7 +238,9 @@ def dogpark(environ, start_response): capture_message("Attempting to fetch the ball") raise ValueError("Fetch aborted. The ball was not returned.") - sentry_init() + sentry_init( + traces_sample_rate=None, # disable all performance monitoring + ) app = SentryWsgiMiddleware(dogpark) client = Client(app) events = capture_events() @@ -301,7 +303,9 @@ def dogpark(environ, start_response): capture_message("Attempting to fetch the ball") raise ValueError("Fetch aborted. 
The ball was not returned.") - sentry_init() + sentry_init( + traces_sample_rate=None, # disable all performance monitoring + ) app = SentryWsgiMiddleware(dogpark) client = Client(app) events = capture_events() diff --git a/tests/test_dsc.py b/tests/test_dsc.py index ea3c0b8988..569b7fd3dc 100644 --- a/tests/test_dsc.py +++ b/tests/test_dsc.py @@ -287,8 +287,8 @@ def my_traces_sampler(sampling_context): "local_traces_sampler_result": None, "local_traces_sample_rate": None, }, - None, # expected_sample_rate - "tracing-disabled-no-transactions-should-be-sent", # expected_sampled (traces_sample_rate=None disables all transaction creation) + 1.0, # expected_sample_rate + "true", # expected_sampled ), ( # 6 traces_sampler overrides incoming (traces_sample_rate not set) { diff --git a/tests/tracing/test_sample_rand_propagation.py b/tests/tracing/test_sample_rand_propagation.py index f598b24154..17bf7a6168 100644 --- a/tests/tracing/test_sample_rand_propagation.py +++ b/tests/tracing/test_sample_rand_propagation.py @@ -18,7 +18,7 @@ def test_continue_trace_with_sample_rand(sentry_init): sentry_init() headers = { - "sentry-trace": "00000000000000000000000000000000-0000000000000000-0", + "sentry-trace": "771a43a4192642f0b136d5159a501700-1234567890abcdef-0", "baggage": "sentry-sample_rand=0.1,sentry-sample_rate=0.5", } @@ -34,7 +34,7 @@ def test_continue_trace_missing_sample_rand(sentry_init): sentry_init() headers = { - "sentry-trace": "00000000000000000000000000000000-0000000000000000", + "sentry-trace": "771a43a4192642f0b136d5159a501700-1234567890abcdef", "baggage": "sentry-placeholder=asdf", } diff --git a/tests/tracing/test_sampling.py b/tests/tracing/test_sampling.py index 59780729b7..bfd845d26d 100644 --- a/tests/tracing/test_sampling.py +++ b/tests/tracing/test_sampling.py @@ -310,7 +310,10 @@ def test_records_lost_event_only_if_traces_sampler_enabled( sampled_output, expected_record_lost_event_calls, ): - sentry_init(traces_sampler=traces_sampler) + sentry_init( + traces_sample_rate=None, + traces_sampler=traces_sampler, + ) record_lost_event_calls = capture_record_lost_event_calls() with start_span(name="dogpark") as span: diff --git a/tests/tracing/test_trace_propagation.py b/tests/tracing/test_trace_propagation.py new file mode 100644 index 0000000000..cb4c3fc90d --- /dev/null +++ b/tests/tracing/test_trace_propagation.py @@ -0,0 +1,282 @@ +import pytest +import requests +import sentry_sdk +from http.client import HTTPConnection + +USE_DEFAULT_TRACES_SAMPLE_RATE = -1 + +INCOMING_TRACE_ID = "771a43a4192642f0b136d5159a501700" +INCOMING_HEADERS = { + "sentry-trace": f"{INCOMING_TRACE_ID}-1234567890abcdef", + "baggage": ( + f"sentry-trace_id={INCOMING_TRACE_ID}, " + "sentry-public_key=frontendpublickey," + "sentry-sample_rate=0.01337," + "sentry-release=myfrontend," + "sentry-environment=bird," + "sentry-transaction=bar" + ), +} + + +# +# Proper high level testing for trace propagation. +# Testing the matrix of test cases described here: +# https://develop.sentry.dev/sdk/telemetry/traces/trace-propagation-cheat-sheet/ +# + + +@pytest.fixture +def _mock_putheader(monkeypatch): + """ + Mock HTTPConnection.putheader to capture calls to it. 
+ """ + putheader_calls = [] + original_putheader = HTTPConnection.putheader + + def mock_putheader_fn(self, header, value): + putheader_calls.append((header, value)) + return original_putheader(self, header, value) + + monkeypatch.setattr(HTTPConnection, "putheader", mock_putheader_fn) + return putheader_calls + + +@pytest.mark.parametrize( + "traces_sample_rate", + [ + USE_DEFAULT_TRACES_SAMPLE_RATE, + None, + 0, + 1, + ], + ids=[ + "traces_sample_rate=DEFAULT", + "traces_sample_rate=None", + "traces_sample_rate=0", + "traces_sample_rate=1", + ], +) +def test_no_incoming_trace_and_trace_propagation_targets_matching( + sentry_init, capture_events, _mock_putheader, traces_sample_rate +): + init_kwargs = {} + if traces_sample_rate != USE_DEFAULT_TRACES_SAMPLE_RATE: + init_kwargs["traces_sample_rate"] = traces_sample_rate + sentry_init(**init_kwargs) + + events = capture_events() + + NO_INCOMING_HEADERS = {} # noqa: N806 + + with sentry_sdk.continue_trace(NO_INCOMING_HEADERS): + with sentry_sdk.start_span(op="test", name="test"): + requests.get("http://example.com") + + # CHECK if performance data (a transaction/span) is sent to Sentry + if traces_sample_rate == 1: + assert len(events) == 1 + else: + assert len(events) == 0 + + outgoing_request_headers = {key: value for key, value in _mock_putheader} + + # CHECK if trace information is added to the outgoing request + assert "sentry-trace" in outgoing_request_headers + assert "baggage" in outgoing_request_headers + + # CHECK if incoming trace is continued + # (no assert necessary, because there is no incoming trace information) + + +@pytest.mark.parametrize( + "traces_sample_rate", + [ + USE_DEFAULT_TRACES_SAMPLE_RATE, + None, + 0, + 1, + ], + ids=[ + "traces_sample_rate=DEFAULT", + "traces_sample_rate=None", + "traces_sample_rate=0", + "traces_sample_rate=1", + ], +) +def test_no_incoming_trace_and_trace_propagation_targets_not_matching( + sentry_init, capture_events, _mock_putheader, traces_sample_rate +): + init_kwargs = { + "trace_propagation_targets": [ + "http://someothersite.com", + ], + } + if traces_sample_rate != USE_DEFAULT_TRACES_SAMPLE_RATE: + init_kwargs["traces_sample_rate"] = traces_sample_rate + sentry_init(**init_kwargs) + + events = capture_events() + + NO_INCOMING_HEADERS = {} # noqa: N806 + + with sentry_sdk.continue_trace(NO_INCOMING_HEADERS): + with sentry_sdk.start_span(op="test", name="test"): + requests.get("http://example.com") + + # CHECK if performance data (a transaction/span) is sent to Sentry + if traces_sample_rate == 1: + assert len(events) == 1 + else: + assert len(events) == 0 + + outgoing_request_headers = {key: value for key, value in _mock_putheader} + + # CHECK if trace information is added to the outgoing request + assert "sentry-trace" not in outgoing_request_headers + assert "baggage" not in outgoing_request_headers + + # CHECK if incoming trace is continued + # (no assert necessary, because there is no incoming trace information, and no outgoing trace information either) + + +@pytest.mark.parametrize( + "traces_sample_rate", + [ + USE_DEFAULT_TRACES_SAMPLE_RATE, + None, + 0, + 1, + ], + ids=[ + "traces_sample_rate=DEFAULT", + "traces_sample_rate=None", + "traces_sample_rate=0", + "traces_sample_rate=1", + ], +) +@pytest.mark.parametrize( + "incoming_parent_sampled", + ["deferred", "1", "0"], + ids=[ + "incoming_parent_sampled=DEFERRED", + "incoming_parent_sampled=1", + "incoming_parent_sampled=0", + ], +) +def test_with_incoming_trace_and_trace_propagation_targets_matching( + sentry_init, + 
capture_events, + _mock_putheader, + incoming_parent_sampled, + traces_sample_rate, +): + init_kwargs = {} + if traces_sample_rate != USE_DEFAULT_TRACES_SAMPLE_RATE: + init_kwargs["traces_sample_rate"] = traces_sample_rate + sentry_init(**init_kwargs) + + events = capture_events() + + incoming_headers = INCOMING_HEADERS.copy() + if incoming_parent_sampled != "deferred": + incoming_headers["sentry-trace"] += f"-{incoming_parent_sampled}" + incoming_headers[ + "baggage" + ] += f',sentry-sampled={"true" if incoming_parent_sampled == "1" else "false"}' # noqa: E231 + + with sentry_sdk.continue_trace(incoming_headers): + with sentry_sdk.start_span(op="test", name="test"): + requests.get("http://example.com") + + # CHECK if performance data (a transaction/span) is sent to Sentry + if traces_sample_rate is None or incoming_parent_sampled == "0": + assert len(events) == 0 + else: + if incoming_parent_sampled == "1" or traces_sample_rate == 1: + assert len(events) == 1 + else: + assert len(events) == 0 + + outgoing_request_headers = {key: value for key, value in _mock_putheader} + + # CHECK if trace information is added to the outgoing request + assert "sentry-trace" in outgoing_request_headers + assert "baggage" in outgoing_request_headers + + # CHECK if incoming trace is continued + # Always continue the incoming trace, no matter traces_sample_rate + assert INCOMING_TRACE_ID in outgoing_request_headers["sentry-trace"] + assert INCOMING_TRACE_ID in outgoing_request_headers["baggage"] + + +@pytest.mark.parametrize( + "traces_sample_rate", + [ + USE_DEFAULT_TRACES_SAMPLE_RATE, + None, + 0, + 1, + ], + ids=[ + "traces_sample_rate=DEFAULT", + "traces_sample_rate=None", + "traces_sample_rate=0", + "traces_sample_rate=1", + ], +) +@pytest.mark.parametrize( + "incoming_parent_sampled", + ["deferred", "1", "0"], + ids=[ + "incoming_parent_sampled=DEFERRED", + "incoming_parent_sampled=1", + "incoming_parent_sampled=0", + ], +) +def test_with_incoming_trace_and_trace_propagation_targets_not_matching( + sentry_init, + capture_events, + _mock_putheader, + incoming_parent_sampled, + traces_sample_rate, +): + init_kwargs = { + "trace_propagation_targets": [ + "http://someothersite.com", + ], + } + if traces_sample_rate != USE_DEFAULT_TRACES_SAMPLE_RATE: + init_kwargs["traces_sample_rate"] = traces_sample_rate + sentry_init(**init_kwargs) + + events = capture_events() + + incoming_headers = INCOMING_HEADERS.copy() + if incoming_parent_sampled != "deferred": + incoming_headers["sentry-trace"] += f"-{incoming_parent_sampled}" + incoming_headers[ + "baggage" + ] += f',sentry-sampled={"true" if incoming_parent_sampled == "1" else "false"}' # noqa: E231 + + with sentry_sdk.continue_trace(incoming_headers): + with sentry_sdk.start_span(op="test", name="test"): + requests.get("http://example.com") + + # CHECK if performance data (a transaction/span) is sent to Sentry + if traces_sample_rate is None or incoming_parent_sampled == "0": + assert len(events) == 0 + else: + if incoming_parent_sampled == "1" or traces_sample_rate == 1: + assert len(events) == 1 + else: + assert len(events) == 0 + + outgoing_request_headers = {key: value for key, value in _mock_putheader} + + # CHECK if trace information is added to the outgoing request + assert "sentry-trace" not in outgoing_request_headers + assert "baggage" not in outgoing_request_headers + + # CHECK if incoming trace is continued + # (no assert necessary, because the trace information is not added to the outgoing request (see previous asserts)) From 
cd0c8ff1f6c176ad91334cdd66fa4ee1618c87e7 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Wed, 9 Apr 2025 15:13:22 +0200 Subject: [PATCH 213/244] Deprecate `Span.set_data()` (#4261) Refs #4102 --- MIGRATION_GUIDE.md | 3 ++- sentry_sdk/ai/monitoring.py | 10 +++---- sentry_sdk/ai/utils.py | 2 +- sentry_sdk/integrations/aiohttp.py | 4 +-- sentry_sdk/integrations/anthropic.py | 16 +++++++----- sentry_sdk/integrations/boto3.py | 2 +- sentry_sdk/integrations/celery/__init__.py | 20 +++++++------- sentry_sdk/integrations/django/__init__.py | 10 +++---- sentry_sdk/integrations/django/caching.py | 12 ++++----- .../integrations/django/signals_handlers.py | 2 +- sentry_sdk/integrations/django/templates.py | 4 +-- sentry_sdk/integrations/graphene.py | 6 ++--- sentry_sdk/integrations/grpc/aio/client.py | 12 ++++----- sentry_sdk/integrations/grpc/client.py | 12 ++++----- sentry_sdk/integrations/httpx.py | 8 +++--- sentry_sdk/integrations/langchain.py | 2 +- sentry_sdk/integrations/redis/utils.py | 2 +- sentry_sdk/integrations/rust_tracing.py | 8 +++--- sentry_sdk/integrations/socket.py | 12 ++++----- sentry_sdk/integrations/sqlalchemy.py | 8 +++--- sentry_sdk/integrations/stdlib.py | 6 ++--- sentry_sdk/integrations/strawberry.py | 26 ++++++++++--------- sentry_sdk/tracing.py | 12 ++++++--- sentry_sdk/tracing_utils.py | 10 +++---- .../integrations/opentelemetry/test_potel.py | 4 +-- tests/test_scrubber.py | 4 +-- tests/tracing/test_misc.py | 8 +++--- 27 files changed, 119 insertions(+), 106 deletions(-) diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index b784cb2a1a..027600a765 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -172,7 +172,8 @@ Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of wh ### Deprecated -- `sentry_sdk.start_transaction` is deprecated. Use `sentry_sdk.start_span` instead. +- `sentry_sdk.start_transaction()` is deprecated. Use `sentry_sdk.start_span()` instead. +- `Span.set_data()` is deprecated. Use `Span.set_attribute()` instead. 
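Since the bullet above only names the replacement, here is a short sketch of what the `set_data()` to `set_attribute()` change looks like in practice, assuming the 3.x API used throughout this patch; the span name and attribute key are invented for the example:

```python
import sentry_sdk

with sentry_sdk.start_span(op="function", name="checkout") as span:
    # 2.x / deprecated spelling:
    # span.set_data("cart.size", 3)

    # 3.x spelling, as applied across the integrations below:
    span.set_attribute("cart.size", 3)
```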
## Upgrading to 2.0 diff --git a/sentry_sdk/ai/monitoring.py b/sentry_sdk/ai/monitoring.py index e149ebe7df..08b6482da5 100644 --- a/sentry_sdk/ai/monitoring.py +++ b/sentry_sdk/ai/monitoring.py @@ -39,9 +39,9 @@ def sync_wrapped(*args, **kwargs): for k, v in kwargs.pop("sentry_tags", {}).items(): span.set_tag(k, v) for k, v in kwargs.pop("sentry_data", {}).items(): - span.set_data(k, v) + span.set_attribute(k, v) if curr_pipeline: - span.set_data("ai.pipeline.name", curr_pipeline) + span.set_attribute("ai.pipeline.name", curr_pipeline) return f(*args, **kwargs) else: _ai_pipeline_name.set(description) @@ -70,9 +70,9 @@ async def async_wrapped(*args, **kwargs): for k, v in kwargs.pop("sentry_tags", {}).items(): span.set_tag(k, v) for k, v in kwargs.pop("sentry_data", {}).items(): - span.set_data(k, v) + span.set_attribute(k, v) if curr_pipeline: - span.set_data("ai.pipeline.name", curr_pipeline) + span.set_attribute("ai.pipeline.name", curr_pipeline) return await f(*args, **kwargs) else: _ai_pipeline_name.set(description) @@ -104,7 +104,7 @@ def record_token_usage( # type: (Span, Optional[int], Optional[int], Optional[int]) -> None ai_pipeline_name = get_ai_pipeline_name() if ai_pipeline_name: - span.set_data("ai.pipeline.name", ai_pipeline_name) + span.set_attribute("ai.pipeline.name", ai_pipeline_name) if prompt_tokens is not None: span.set_measurement("ai_prompt_tokens_used", value=prompt_tokens) if completion_tokens is not None: diff --git a/sentry_sdk/ai/utils.py b/sentry_sdk/ai/utils.py index ed3494f679..5868606940 100644 --- a/sentry_sdk/ai/utils.py +++ b/sentry_sdk/ai/utils.py @@ -29,4 +29,4 @@ def _normalize_data(data): def set_data_normalized(span, key, value): # type: (Span, str, Any) -> None normalized = _normalize_data(value) - span.set_data(key, normalized) + span.set_attribute(key, normalized) diff --git a/sentry_sdk/integrations/aiohttp.py b/sentry_sdk/integrations/aiohttp.py index 62af0406cb..8115063c9c 100644 --- a/sentry_sdk/integrations/aiohttp.py +++ b/sentry_sdk/integrations/aiohttp.py @@ -245,7 +245,7 @@ async def on_request_start(session, trace_config_ctx, params): data[SPANDATA.HTTP_FRAGMENT] = parsed_url.fragment for key, value in data.items(): - span.set_data(key, value) + span.set_attribute(key, value) client = sentry_sdk.get_client() @@ -291,7 +291,7 @@ async def on_request_end(session, trace_config_ctx, params): span = trace_config_ctx.span span.set_http_status(int(params.response.status)) - span.set_data("reason", params.response.reason) + span.set_attribute("reason", params.response.reason) span.finish() trace_config = TraceConfig() diff --git a/sentry_sdk/integrations/anthropic.py b/sentry_sdk/integrations/anthropic.py index 148b978b40..454b6f93ca 100644 --- a/sentry_sdk/integrations/anthropic.py +++ b/sentry_sdk/integrations/anthropic.py @@ -121,13 +121,13 @@ def _add_ai_data_to_span( with capture_internal_exceptions(): if should_send_default_pii() and integration.include_prompts: complete_message = "".join(content_blocks) - span.set_data( + span.set_attribute( SPANDATA.AI_RESPONSES, [{"type": "text", "text": complete_message}], ) total_tokens = input_tokens + output_tokens record_token_usage(span, input_tokens, output_tokens, total_tokens) - span.set_data(SPANDATA.AI_STREAMING, True) + span.set_attribute(SPANDATA.AI_STREAMING, True) def _sentry_patched_create_common(f, *args, **kwargs): @@ -159,15 +159,17 @@ def _sentry_patched_create_common(f, *args, **kwargs): model = kwargs.get("model") with capture_internal_exceptions(): - 
span.set_data(SPANDATA.AI_MODEL_ID, model) - span.set_data(SPANDATA.AI_STREAMING, False) + span.set_attribute(SPANDATA.AI_MODEL_ID, model) + span.set_attribute(SPANDATA.AI_STREAMING, False) if should_send_default_pii() and integration.include_prompts: - span.set_data(SPANDATA.AI_INPUT_MESSAGES, messages) + span.set_attribute(SPANDATA.AI_INPUT_MESSAGES, messages) if hasattr(result, "content"): if should_send_default_pii() and integration.include_prompts: - span.set_data(SPANDATA.AI_RESPONSES, _get_responses(result.content)) + span.set_attribute( + SPANDATA.AI_RESPONSES, _get_responses(result.content) + ) _calculate_token_usage(result, span) span.__exit__(None, None, None) @@ -215,7 +217,7 @@ async def new_iterator_async(): result._iterator = new_iterator() else: - span.set_data("unknown_response", True) + span.set_attribute("unknown_response", True) span.__exit__(None, None, None) return result diff --git a/sentry_sdk/integrations/boto3.py b/sentry_sdk/integrations/boto3.py index 936d15639d..65239b7548 100644 --- a/sentry_sdk/integrations/boto3.py +++ b/sentry_sdk/integrations/boto3.py @@ -77,7 +77,7 @@ def _sentry_request_created(service_id, request, operation_name, **kwargs): data[SPANDATA.HTTP_FRAGMENT] = parsed_url.fragment for key, value in data.items(): - span.set_data(key, value) + span.set_attribute(key, value) span.set_tag("aws.service_id", service_id) span.set_tag("aws.operation_name", operation_name) diff --git a/sentry_sdk/integrations/celery/__init__.py b/sentry_sdk/integrations/celery/__init__.py index 238704fa68..bbaf3aec77 100644 --- a/sentry_sdk/integrations/celery/__init__.py +++ b/sentry_sdk/integrations/celery/__init__.py @@ -343,7 +343,7 @@ def _set_messaging_destination_name(task, span): if delivery_info.get("exchange") == "" and routing_key is not None: # Empty exchange indicates the default exchange, meaning the tasks # are sent to the queue with the same name as the routing key. - span.set_data(SPANDATA.MESSAGING_DESTINATION_NAME, routing_key) + span.set_attribute(SPANDATA.MESSAGING_DESTINATION_NAME, routing_key) def _wrap_task_call(task, f): @@ -380,18 +380,20 @@ def _inner(*args, **kwargs): ) if latency is not None: - span.set_data(SPANDATA.MESSAGING_MESSAGE_RECEIVE_LATENCY, latency) + span.set_attribute( + SPANDATA.MESSAGING_MESSAGE_RECEIVE_LATENCY, latency + ) with capture_internal_exceptions(): - span.set_data(SPANDATA.MESSAGING_MESSAGE_ID, task.request.id) + span.set_attribute(SPANDATA.MESSAGING_MESSAGE_ID, task.request.id) with capture_internal_exceptions(): - span.set_data( + span.set_attribute( SPANDATA.MESSAGING_MESSAGE_RETRY_COUNT, task.request.retries ) with capture_internal_exceptions(): - span.set_data( + span.set_attribute( SPANDATA.MESSAGING_SYSTEM, task.app.connection().transport.driver_type, ) @@ -499,18 +501,18 @@ def sentry_publish(self, *args, **kwargs): only_if_parent=True, ) as span: if task_id is not None: - span.set_data(SPANDATA.MESSAGING_MESSAGE_ID, task_id) + span.set_attribute(SPANDATA.MESSAGING_MESSAGE_ID, task_id) if exchange == "" and routing_key is not None: # Empty exchange indicates the default exchange, meaning messages are # routed to the queue with the same name as the routing key. 
- span.set_data(SPANDATA.MESSAGING_DESTINATION_NAME, routing_key) + span.set_attribute(SPANDATA.MESSAGING_DESTINATION_NAME, routing_key) if retries is not None: - span.set_data(SPANDATA.MESSAGING_MESSAGE_RETRY_COUNT, retries) + span.set_attribute(SPANDATA.MESSAGING_MESSAGE_RETRY_COUNT, retries) with capture_internal_exceptions(): - span.set_data( + span.set_attribute( SPANDATA.MESSAGING_SYSTEM, self.connection.transport.driver_type ) diff --git a/sentry_sdk/integrations/django/__init__.py b/sentry_sdk/integrations/django/__init__.py index 27b53e52a8..a82cef2000 100644 --- a/sentry_sdk/integrations/django/__init__.py +++ b/sentry_sdk/integrations/django/__init__.py @@ -679,7 +679,7 @@ def _set_db_data(span, cursor_or_db): # type: (Span, Any) -> None db = cursor_or_db.db if hasattr(cursor_or_db, "db") else cursor_or_db vendor = db.vendor - span.set_data(SPANDATA.DB_SYSTEM, vendor) + span.set_attribute(SPANDATA.DB_SYSTEM, vendor) # Some custom backends override `__getattr__`, making it look like `cursor_or_db` # actually has a `connection` and the `connection` has a `get_dsn_parameters` @@ -712,16 +712,16 @@ def _set_db_data(span, cursor_or_db): db_name = connection_params.get("dbname") or connection_params.get("database") if db_name is not None: - span.set_data(SPANDATA.DB_NAME, db_name) + span.set_attribute(SPANDATA.DB_NAME, db_name) server_address = connection_params.get("host") if server_address is not None: - span.set_data(SPANDATA.SERVER_ADDRESS, server_address) + span.set_attribute(SPANDATA.SERVER_ADDRESS, server_address) server_port = connection_params.get("port") if server_port is not None: - span.set_data(SPANDATA.SERVER_PORT, str(server_port)) + span.set_attribute(SPANDATA.SERVER_PORT, str(server_port)) server_socket_address = connection_params.get("unix_socket") if server_socket_address is not None: - span.set_data(SPANDATA.SERVER_SOCKET_ADDRESS, server_socket_address) + span.set_attribute(SPANDATA.SERVER_SOCKET_ADDRESS, server_socket_address) diff --git a/sentry_sdk/integrations/django/caching.py b/sentry_sdk/integrations/django/caching.py index 562aec6e02..236da9f749 100644 --- a/sentry_sdk/integrations/django/caching.py +++ b/sentry_sdk/integrations/django/caching.py @@ -60,22 +60,22 @@ def _instrument_call( with capture_internal_exceptions(): if address is not None: - span.set_data(SPANDATA.NETWORK_PEER_ADDRESS, address) + span.set_attribute(SPANDATA.NETWORK_PEER_ADDRESS, address) if port is not None: - span.set_data(SPANDATA.NETWORK_PEER_PORT, port) + span.set_attribute(SPANDATA.NETWORK_PEER_PORT, port) key = _get_safe_key(method_name, args, kwargs) if key is not None: - span.set_data(SPANDATA.CACHE_KEY, key) + span.set_attribute(SPANDATA.CACHE_KEY, key) item_size = None if is_get_operation: if value: item_size = len(str(value)) - span.set_data(SPANDATA.CACHE_HIT, True) + span.set_attribute(SPANDATA.CACHE_HIT, True) else: - span.set_data(SPANDATA.CACHE_HIT, False) + span.set_attribute(SPANDATA.CACHE_HIT, False) else: # TODO: We don't handle `get_or_set` which we should arg_count = len(args) if arg_count >= 2: @@ -86,7 +86,7 @@ def _instrument_call( item_size = len(str(args[0])) if item_size is not None: - span.set_data(SPANDATA.CACHE_ITEM_SIZE, item_size) + span.set_attribute(SPANDATA.CACHE_ITEM_SIZE, item_size) return value diff --git a/sentry_sdk/integrations/django/signals_handlers.py b/sentry_sdk/integrations/django/signals_handlers.py index 69c1a3cdfb..6e398ddfc3 100644 --- a/sentry_sdk/integrations/django/signals_handlers.py +++ 
b/sentry_sdk/integrations/django/signals_handlers.py @@ -71,7 +71,7 @@ def wrapper(*args, **kwargs): origin=DjangoIntegration.origin, only_if_parent=True, ) as span: - span.set_data("signal", signal_name) + span.set_attribute("signal", signal_name) return receiver(*args, **kwargs) return wrapper diff --git a/sentry_sdk/integrations/django/templates.py b/sentry_sdk/integrations/django/templates.py index 53ccc60fc6..fd6e56b515 100644 --- a/sentry_sdk/integrations/django/templates.py +++ b/sentry_sdk/integrations/django/templates.py @@ -69,7 +69,7 @@ def rendered_content(self): ) as span: if isinstance(self.context_data, dict): for k, v in self.context_data.items(): - span.set_data(f"context.{k}", v) + span.set_attribute(f"context.{k}", v) return real_rendered_content.fget(self) SimpleTemplateResponse.rendered_content = rendered_content @@ -97,7 +97,7 @@ def render(request, template_name, context=None, *args, **kwargs): only_if_parent=True, ) as span: for k, v in context.items(): - span.set_data(f"context.{k}", v) + span.set_attribute(f"context.{k}", v) return real_render(request, template_name, context, *args, **kwargs) django.shortcuts.render = render diff --git a/sentry_sdk/integrations/graphene.py b/sentry_sdk/integrations/graphene.py index 877ebd0a7e..9269a4403c 100644 --- a/sentry_sdk/integrations/graphene.py +++ b/sentry_sdk/integrations/graphene.py @@ -138,7 +138,7 @@ def graphql_span(schema, source, kwargs): with sentry_sdk.start_span( op=op, name=operation_name, only_if_parent=True ) as graphql_span: - graphql_span.set_data("graphql.document", source) - graphql_span.set_data("graphql.operation.name", operation_name) - graphql_span.set_data("graphql.operation.type", operation_type) + graphql_span.set_attribute("graphql.document", source) + graphql_span.set_attribute("graphql.operation.name", operation_name) + graphql_span.set_attribute("graphql.operation.type", operation_type) yield diff --git a/sentry_sdk/integrations/grpc/aio/client.py b/sentry_sdk/integrations/grpc/aio/client.py index 2fd9f70bed..a8ea94276f 100644 --- a/sentry_sdk/integrations/grpc/aio/client.py +++ b/sentry_sdk/integrations/grpc/aio/client.py @@ -53,8 +53,8 @@ async def intercept_unary_unary( origin=SPAN_ORIGIN, only_if_parent=True, ) as span: - span.set_data("type", "unary unary") - span.set_data("method", method) + span.set_attribute("type", "unary unary") + span.set_attribute("method", method) client_call_details = self._update_client_call_details_metadata_from_scope( client_call_details @@ -62,7 +62,7 @@ async def intercept_unary_unary( response = await continuation(client_call_details, request) status_code = await response.code() - span.set_data("code", status_code.name) + span.set_attribute("code", status_code.name) return response @@ -86,8 +86,8 @@ async def intercept_unary_stream( origin=SPAN_ORIGIN, only_if_parent=True, ) as span: - span.set_data("type", "unary stream") - span.set_data("method", method) + span.set_attribute("type", "unary stream") + span.set_attribute("method", method) client_call_details = self._update_client_call_details_metadata_from_scope( client_call_details @@ -95,6 +95,6 @@ async def intercept_unary_stream( response = await continuation(client_call_details, request) # status_code = await response.code() - # span.set_data("code", status_code) + # span.set_attribute("code", status_code) return response diff --git a/sentry_sdk/integrations/grpc/client.py b/sentry_sdk/integrations/grpc/client.py index cb456fc9b4..b7a1ddd85e 100644 --- a/sentry_sdk/integrations/grpc/client.py +++ 
b/sentry_sdk/integrations/grpc/client.py @@ -33,15 +33,15 @@ def intercept_unary_unary(self, continuation, client_call_details, request): origin=SPAN_ORIGIN, only_if_parent=True, ) as span: - span.set_data("type", "unary unary") - span.set_data("method", method) + span.set_attribute("type", "unary unary") + span.set_attribute("method", method) client_call_details = self._update_client_call_details_metadata_from_scope( client_call_details ) response = continuation(client_call_details, request) - span.set_data("code", response.code().name) + span.set_attribute("code", response.code().name) return response @@ -55,8 +55,8 @@ def intercept_unary_stream(self, continuation, client_call_details, request): origin=SPAN_ORIGIN, only_if_parent=True, ) as span: - span.set_data("type", "unary stream") - span.set_data("method", method) + span.set_attribute("type", "unary stream") + span.set_attribute("method", method) client_call_details = self._update_client_call_details_metadata_from_scope( client_call_details @@ -66,7 +66,7 @@ def intercept_unary_stream(self, continuation, client_call_details, request): client_call_details, request ) # type: UnaryStreamCall # Setting code on unary-stream leads to execution getting stuck - # span.set_data("code", response.code().name) + # span.set_attribute("code", response.code().name) return response diff --git a/sentry_sdk/integrations/httpx.py b/sentry_sdk/integrations/httpx.py index 1ac2708f32..a7c391851c 100644 --- a/sentry_sdk/integrations/httpx.py +++ b/sentry_sdk/integrations/httpx.py @@ -72,7 +72,7 @@ def send(self, request, **kwargs): data[SPANDATA.HTTP_FRAGMENT] = parsed_url.fragment for key, value in data.items(): - span.set_data(key, value) + span.set_attribute(key, value) if should_propagate_trace(sentry_sdk.get_client(), str(request.url)): for ( @@ -93,7 +93,7 @@ def send(self, request, **kwargs): rv = real_send(self, request, **kwargs) span.set_http_status(rv.status_code) - span.set_data("reason", rv.reason_phrase) + span.set_attribute("reason", rv.reason_phrase) data[SPANDATA.HTTP_STATUS_CODE] = rv.status_code data["reason"] = rv.reason_phrase @@ -142,7 +142,7 @@ async def send(self, request, **kwargs): data[SPANDATA.HTTP_FRAGMENT] = parsed_url.fragment for key, value in data.items(): - span.set_data(key, value) + span.set_attribute(key, value) if should_propagate_trace(sentry_sdk.get_client(), str(request.url)): for ( @@ -165,7 +165,7 @@ async def send(self, request, **kwargs): rv = await real_send(self, request, **kwargs) span.set_http_status(rv.status_code) - span.set_data("reason", rv.reason_phrase) + span.set_attribute("reason", rv.reason_phrase) data[SPANDATA.HTTP_STATUS_CODE] = rv.status_code data["reason"] = rv.reason_phrase diff --git a/sentry_sdk/integrations/langchain.py b/sentry_sdk/integrations/langchain.py index c775f9d92b..3d40ff1dbc 100644 --- a/sentry_sdk/integrations/langchain.py +++ b/sentry_sdk/integrations/langchain.py @@ -229,7 +229,7 @@ def on_chat_model_start(self, serialized, messages, *, run_id, **kwargs): if not model and "anthropic" in all_params.get("_type"): model = "claude-2" if model: - span.set_data(SPANDATA.AI_MODEL_ID, model) + span.set_attribute(SPANDATA.AI_MODEL_ID, model) if should_send_default_pii() and self.include_prompts: set_data_normalized( span, diff --git a/sentry_sdk/integrations/redis/utils.py b/sentry_sdk/integrations/redis/utils.py index 9eb16c5bc4..58130582ce 100644 --- a/sentry_sdk/integrations/redis/utils.py +++ b/sentry_sdk/integrations/redis/utils.py @@ -36,7 +36,7 @@ def _update_span(span, 
*data_bags): if key in TAG_KEYS: span.set_tag(key, value) else: - span.set_data(key, value) + span.set_attribute(key, value) def _create_breadcrumb(message, *data_bags): diff --git a/sentry_sdk/integrations/rust_tracing.py b/sentry_sdk/integrations/rust_tracing.py index 9b5a83197e..acfe9bd7f4 100644 --- a/sentry_sdk/integrations/rust_tracing.py +++ b/sentry_sdk/integrations/rust_tracing.py @@ -214,9 +214,9 @@ def on_new_span(self, attrs: str, span_id: str) -> Optional[Span]: fields = metadata.get("fields", []) for field in fields: if self._include_tracing_fields(): - span.set_data(field, attrs.get(field)) + span.set_attribute(field, attrs.get(field)) else: - span.set_data(field, SENSITIVE_DATA_SUBSTITUTE) + span.set_attribute(field, SENSITIVE_DATA_SUBSTITUTE) return span @@ -229,9 +229,9 @@ def on_record(self, span_id: str, values: str, span: Optional[Span]) -> None: deserialized_values = json.loads(values) for key, value in deserialized_values.items(): if self._include_tracing_fields(): - span.set_data(key, value) + span.set_attribute(key, value) else: - span.set_data(key, SENSITIVE_DATA_SUBSTITUTE) + span.set_attribute(key, SENSITIVE_DATA_SUBSTITUTE) class RustTracingIntegration(Integration): diff --git a/sentry_sdk/integrations/socket.py b/sentry_sdk/integrations/socket.py index 08fd4a8f13..544a63c0f0 100644 --- a/sentry_sdk/integrations/socket.py +++ b/sentry_sdk/integrations/socket.py @@ -64,10 +64,10 @@ def create_connection( only_if_parent=True, ) as span: host, port = address - span.set_data("address.host", host) - span.set_data("address.port", port) - span.set_data("timeout", timeout) - span.set_data("source_address", source_address) + span.set_attribute("address.host", host) + span.set_attribute("address.port", port) + span.set_attribute("timeout", timeout) + span.set_attribute("source_address", source_address) return real_create_connection( address=address, timeout=timeout, source_address=source_address @@ -92,8 +92,8 @@ def getaddrinfo(host, port, family=0, type=0, proto=0, flags=0): origin=SocketIntegration.origin, only_if_parent=True, ) as span: - span.set_data("host", host) - span.set_data("port", port) + span.set_attribute("host", host) + span.set_attribute("port", port) return real_getaddrinfo(host, port, family, type, proto, flags) diff --git a/sentry_sdk/integrations/sqlalchemy.py b/sentry_sdk/integrations/sqlalchemy.py index b00c6b7551..4c4d8fde8c 100644 --- a/sentry_sdk/integrations/sqlalchemy.py +++ b/sentry_sdk/integrations/sqlalchemy.py @@ -128,19 +128,19 @@ def _set_db_data(span, conn): # type: (Span, Any) -> None db_system = _get_db_system(conn.engine.name) if db_system is not None: - span.set_data(SPANDATA.DB_SYSTEM, db_system) + span.set_attribute(SPANDATA.DB_SYSTEM, db_system) if conn.engine.url is None: return db_name = conn.engine.url.database if db_name is not None: - span.set_data(SPANDATA.DB_NAME, db_name) + span.set_attribute(SPANDATA.DB_NAME, db_name) server_address = conn.engine.url.host if server_address is not None: - span.set_data(SPANDATA.SERVER_ADDRESS, server_address) + span.set_attribute(SPANDATA.SERVER_ADDRESS, server_address) server_port = conn.engine.url.port if server_port is not None: - span.set_data(SPANDATA.SERVER_PORT, server_port) + span.set_attribute(SPANDATA.SERVER_PORT, server_port) diff --git a/sentry_sdk/integrations/stdlib.py b/sentry_sdk/integrations/stdlib.py index 49313bb0a5..adc0de4f28 100644 --- a/sentry_sdk/integrations/stdlib.py +++ b/sentry_sdk/integrations/stdlib.py @@ -108,7 +108,7 @@ def putrequest(self, method, url, 
*args, **kwargs): data[SPANDATA.HTTP_FRAGMENT] = parsed_url.fragment for key, value in data.items(): - span.set_data(key, value) + span.set_attribute(key, value) rv = real_putrequest(self, method, url, *args, **kwargs) @@ -147,7 +147,7 @@ def getresponse(self, *args, **kwargs): status_code = int(rv.status) span.set_http_status(status_code) - span.set_data("reason", rv.reason) + span.set_attribute("reason", rv.reason) sentry_sdk.add_breadcrumb( type="http", @@ -246,7 +246,7 @@ def sentry_patched_popen_init(self, *a, **kw): env["SUBPROCESS_" + k.upper().replace("-", "_")] = v if cwd: - span.set_data("subprocess.cwd", cwd) + span.set_attribute("subprocess.cwd", cwd) rv = old_popen_init(self, *a, **kw) diff --git a/sentry_sdk/integrations/strawberry.py b/sentry_sdk/integrations/strawberry.py index 608dfcbb8c..274ae8d1c9 100644 --- a/sentry_sdk/integrations/strawberry.py +++ b/sentry_sdk/integrations/strawberry.py @@ -182,9 +182,9 @@ def on_operation(self): origin=StrawberryIntegration.origin, only_if_parent=True, ) as graphql_span: - graphql_span.set_data("graphql.operation.type", operation_type) - graphql_span.set_data("graphql.document", self.execution_context.query) - graphql_span.set_data("graphql.resource_name", self._resource_name) + graphql_span.set_attribute("graphql.operation.type", operation_type) + graphql_span.set_attribute("graphql.document", self.execution_context.query) + graphql_span.set_attribute("graphql.resource_name", self._resource_name) yield @@ -192,7 +192,9 @@ def on_operation(self): self._operation_name = self.execution_context.operation_name if self._operation_name is not None: - graphql_span.set_data("graphql.operation.name", self._operation_name) + graphql_span.set_attribute( + "graphql.operation.name", self._operation_name + ) sentry_sdk.get_current_scope().set_transaction_name( self._operation_name, @@ -246,10 +248,10 @@ async def resolve(self, _next, root, info, *args, **kwargs): name="resolving {}".format(field_path), origin=StrawberryIntegration.origin, ) as span: - span.set_data("graphql.field_name", info.field_name) - span.set_data("graphql.parent_type", info.parent_type.name) - span.set_data("graphql.field_path", field_path) - span.set_data("graphql.path", ".".join(map(str, info.path.as_list()))) + span.set_attribute("graphql.field_name", info.field_name) + span.set_attribute("graphql.parent_type", info.parent_type.name) + span.set_attribute("graphql.field_path", field_path) + span.set_attribute("graphql.path", ".".join(map(str, info.path.as_list()))) return await self._resolve(_next, root, info, *args, **kwargs) @@ -267,10 +269,10 @@ def resolve(self, _next, root, info, *args, **kwargs): name="resolving {}".format(field_path), origin=StrawberryIntegration.origin, ) as span: - span.set_data("graphql.field_name", info.field_name) - span.set_data("graphql.parent_type", info.parent_type.name) - span.set_data("graphql.field_path", field_path) - span.set_data("graphql.path", ".".join(map(str, info.path.as_list()))) + span.set_attribute("graphql.field_name", info.field_name) + span.set_attribute("graphql.parent_type", info.parent_type.name) + span.set_attribute("graphql.field_path", field_path) + span.set_attribute("graphql.path", ".".join(map(str, info.path.as_list()))) return _next(root, info, *args, **kwargs) diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 5a06d704ee..587b7dbe9b 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -596,6 +596,12 @@ def set_tag(self, key, value): def set_data(self, key, value): # type: (str, Any) -> 
None + warnings.warn( + "`Span.set_data` is deprecated. Please use `Span.set_attribute` instead.", + DeprecationWarning, + stacklevel=2, + ) + # TODO-neel-potel we cannot add dicts here self.set_attribute(key, value) @@ -662,10 +668,10 @@ def set_measurement(self, name, value, unit=""): def set_thread(self, thread_id, thread_name): # type: (Optional[int], Optional[str]) -> None if thread_id is not None: - self.set_data(SPANDATA.THREAD_ID, str(thread_id)) + self.set_attribute(SPANDATA.THREAD_ID, str(thread_id)) if thread_name is not None: - self.set_data(SPANDATA.THREAD_NAME, thread_name) + self.set_attribute(SPANDATA.THREAD_NAME, thread_name) def update_active_thread(self): # type: () -> None @@ -674,7 +680,7 @@ def update_active_thread(self): def set_http_status(self, http_status): # type: (int) -> None - self.set_data(SPANDATA.HTTP_STATUS_CODE, http_status) + self.set_attribute(SPANDATA.HTTP_STATUS_CODE, http_status) self.set_status(get_span_status_from_http_code(http_status)) def is_success(self): diff --git a/sentry_sdk/tracing_utils.py b/sentry_sdk/tracing_utils.py index 4bc7c6aeff..27320ac589 100644 --- a/sentry_sdk/tracing_utils.py +++ b/sentry_sdk/tracing_utils.py @@ -151,7 +151,7 @@ def record_sql_queries( only_if_parent=True, ) as span: for k, v in data.items(): - span.set_data(k, v) + span.set_attribute(k, v) yield span @@ -249,14 +249,14 @@ def add_query_source(span): except Exception: lineno = None if lineno is not None: - span.set_data(SPANDATA.CODE_LINENO, frame.f_lineno) + span.set_attribute(SPANDATA.CODE_LINENO, frame.f_lineno) try: namespace = frame.f_globals.get("__name__") except Exception: namespace = None if namespace is not None: - span.set_data(SPANDATA.CODE_NAMESPACE, namespace) + span.set_attribute(SPANDATA.CODE_NAMESPACE, namespace) filepath = _get_frame_module_abs_path(frame) if filepath is not None: @@ -266,7 +266,7 @@ def add_query_source(span): in_app_path = filepath.replace(project_root, "").lstrip(os.sep) else: in_app_path = filepath - span.set_data(SPANDATA.CODE_FILEPATH, in_app_path) + span.set_attribute(SPANDATA.CODE_FILEPATH, in_app_path) try: code_function = frame.f_code.co_name @@ -274,7 +274,7 @@ def add_query_source(span): code_function = None if code_function is not None: - span.set_data(SPANDATA.CODE_FUNCTION, frame.f_code.co_name) + span.set_attribute(SPANDATA.CODE_FUNCTION, frame.f_code.co_name) def extract_sentrytrace_data(header): diff --git a/tests/integrations/opentelemetry/test_potel.py b/tests/integrations/opentelemetry/test_potel.py index 2d1d66c6d0..753f2b4cf2 100644 --- a/tests/integrations/opentelemetry/test_potel.py +++ b/tests/integrations/opentelemetry/test_potel.py @@ -229,9 +229,9 @@ def test_span_data_started_with_sentry(capture_envelopes): envelopes = capture_envelopes() with sentry_sdk.start_span(op="http", description="request") as request_span: - request_span.set_data("foo", "bar") + request_span.set_attribute("foo", "bar") with sentry_sdk.start_span(op="db", description="statement") as db_span: - db_span.set_data("baz", 42) + db_span.set_attribute("baz", 42) (envelope,) = envelopes (item,) = envelope.items diff --git a/tests/test_scrubber.py b/tests/test_scrubber.py index 3c466de79f..ee209da4b1 100644 --- a/tests/test_scrubber.py +++ b/tests/test_scrubber.py @@ -155,8 +155,8 @@ def test_span_data_scrubbing(sentry_init, capture_events): with start_span(name="hi"): with start_span(op="foo", name="bar") as span: - span.set_data("password", "secret") - span.set_data("datafoo", "databar") + span.set_attribute("password", 
"secret") + span.set_attribute("datafoo", "databar") (event,) = events assert event["spans"][0]["data"] == ApproxDict( diff --git a/tests/tracing/test_misc.py b/tests/tracing/test_misc.py index a807c6eb74..5b0213d6c6 100644 --- a/tests/tracing/test_misc.py +++ b/tests/tracing/test_misc.py @@ -37,8 +37,8 @@ def test_span_data_scrubbing_and_trimming(sentry_init, capture_events): with start_span(name="hi"): with start_span(op="foo", name="bar") as span: - span.set_data("password", "secret") - span.set_data("datafoo", "databar") + span.set_attribute("password", "secret") + span.set_attribute("datafoo", "databar") for i in range(10): with start_span(op=f"foo{i}"): @@ -83,9 +83,9 @@ def test_root_span_data(sentry_init, capture_events): with start_span(name="test-root-span"): root_span = sentry_sdk.get_current_span() - root_span.set_data("foo", "bar") + root_span.set_attribute("foo", "bar") with start_span(op="test-span") as span: - span.set_data("spanfoo", "spanbar") + span.set_attribute("spanfoo", "spanbar") assert len(events) == 1 From 6584ce0c16e631494a4c634a2cb4eb703eaaf7dd Mon Sep 17 00:00:00 2001 From: Daniel Szoke <7881302+szokeasaurusrex@users.noreply.github.com> Date: Wed, 9 Apr 2025 15:42:27 +0200 Subject: [PATCH 214/244] feat(tracing): Remove `containing_transaction` (#4255) BREAKING CHANGE: Remove `Span.containing_transaction`. Use `Span.root_span` instead. Closes #4253 --- Thank you for contributing to `sentry-python`! Please add tests to validate your changes, and lint your code using `tox -e linters`. Running the test suite on your PR might require maintainer approval. --- sentry_sdk/scope.py | 14 +++++++------- sentry_sdk/tracing.py | 19 +------------------ .../rust_tracing/test_rust_tracing.py | 2 +- 3 files changed, 9 insertions(+), 26 deletions(-) diff --git a/sentry_sdk/scope.py b/sentry_sdk/scope.py index e08e9f1ef8..4d69b2ff68 100644 --- a/sentry_sdk/scope.py +++ b/sentry_sdk/scope.py @@ -698,21 +698,21 @@ def transaction(self): return None # there is an orphan span on the scope - if self._span.containing_transaction is None: + if self._span.root_span is None: return None - # there is either a transaction (which is its own containing - # transaction) or a non-orphan span on the scope - return self._span.containing_transaction + # there is either a root span (which is its own root + # span) or a non-orphan span on the scope + return self._span.root_span def set_transaction_name(self, name, source=None): # type: (str, Optional[str]) -> None """Set the transaction name and optionally the transaction source.""" self._transaction = name - if self._span and self._span.containing_transaction: - self._span.containing_transaction.name = name + if self._span and self._span.root_span: + self._span.root_span.name = name if source: - self._span.containing_transaction.source = source + self._span.root_span.source = source if source: self._transaction_info["source"] = source diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 587b7dbe9b..740802e941 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -1,7 +1,6 @@ from datetime import datetime from enum import Enum import json -import warnings from opentelemetry import trace as otel_trace, context from opentelemetry.trace import ( @@ -139,7 +138,7 @@ def __repr__(self): return "<%s>" % self.__class__.__name__ @property - def containing_transaction(self): + def root_span(self): # type: () -> Optional[Span] return None @@ -385,22 +384,6 @@ def origin(self, value): self.set_attribute(SentrySpanAttribute.ORIGIN, 
value) - @property - def containing_transaction(self): - # type: () -> Optional[Span] - """ - Get the transaction this span is a child of. - - .. deprecated:: 3.0.0 - This will be removed in the future. Use :func:`root_span` instead. - """ - warnings.warn( - "Deprecated: This will be removed in the future. Use root_span instead.", - DeprecationWarning, - stacklevel=2, - ) - return self.root_span - @property def root_span(self): # type: () -> Optional[Span] diff --git a/tests/integrations/rust_tracing/test_rust_tracing.py b/tests/integrations/rust_tracing/test_rust_tracing.py index dc7ee86617..9ab64843c4 100644 --- a/tests/integrations/rust_tracing/test_rust_tracing.py +++ b/tests/integrations/rust_tracing/test_rust_tracing.py @@ -189,7 +189,7 @@ def test_on_new_span_without_transaction(sentry_init): rust_tracing.new_span(RustTracingLevel.Info, 3) current_span = sentry_sdk.get_current_span() assert current_span is not None - assert current_span.containing_transaction is None + assert current_span.root_span is None def test_on_event_exception(sentry_init, capture_events): From c4255331d50803dd4c04bf0ec6bb0ff59c0026e7 Mon Sep 17 00:00:00 2001 From: Daniel Szoke <7881302+szokeasaurusrex@users.noreply.github.com> Date: Thu, 10 Apr 2025 09:16:48 +0200 Subject: [PATCH 215/244] fix: Import `warnings` in `tracing` (#4264) --- sentry_sdk/tracing.py | 1 + 1 file changed, 1 insertion(+) diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 740802e941..37f56453bf 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -1,6 +1,7 @@ from datetime import datetime from enum import Enum import json +import warnings from opentelemetry import trace as otel_trace, context from opentelemetry.trace import ( From 32369cbaa726a9999f2ca63854ce9970730d6fe0 Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Thu, 10 Apr 2025 12:02:11 +0200 Subject: [PATCH 216/244] ref: Move sampler out of `integrations/opentelemetry/` (#4258) Moving stuff out of `integrations/opentelemetry/` step by step since there is no OpenTelemetry integration anymore -- it's part of the core SDK. 
This moves `sentry_sdk/integrations/opentelemetry/sampler.py` -> `sentry_sdk/opentelemetry/sampler.py` Ref https://github.com/getsentry/sentry-python/issues/3853 --- sentry_sdk/integrations/opentelemetry/integration.py | 2 +- sentry_sdk/integrations/opentelemetry/span_processor.py | 2 +- sentry_sdk/opentelemetry/__init__.py | 5 +++++ sentry_sdk/{integrations => }/opentelemetry/sampler.py | 0 4 files changed, 7 insertions(+), 2 deletions(-) create mode 100644 sentry_sdk/opentelemetry/__init__.py rename sentry_sdk/{integrations => }/opentelemetry/sampler.py (100%) diff --git a/sentry_sdk/integrations/opentelemetry/integration.py b/sentry_sdk/integrations/opentelemetry/integration.py index 1124e736ed..581918e81c 100644 --- a/sentry_sdk/integrations/opentelemetry/integration.py +++ b/sentry_sdk/integrations/opentelemetry/integration.py @@ -9,7 +9,7 @@ from sentry_sdk.integrations.opentelemetry.span_processor import ( SentrySpanProcessor, ) -from sentry_sdk.integrations.opentelemetry.sampler import SentrySampler +from sentry_sdk.opentelemetry.sampler import SentrySampler from sentry_sdk.utils import logger try: diff --git a/sentry_sdk/integrations/opentelemetry/span_processor.py b/sentry_sdk/integrations/opentelemetry/span_processor.py index d82d6a03e9..ec936125c3 100644 --- a/sentry_sdk/integrations/opentelemetry/span_processor.py +++ b/sentry_sdk/integrations/opentelemetry/span_processor.py @@ -15,13 +15,13 @@ from sentry_sdk.consts import SPANDATA from sentry_sdk.tracing import DEFAULT_SPAN_ORIGIN from sentry_sdk.utils import get_current_thread_meta +from sentry_sdk.opentelemetry.sampler import create_sampling_context from sentry_sdk.profiler.continuous_profiler import ( try_autostart_continuous_profiler, get_profiler_id, try_profile_lifecycle_trace_start, ) from sentry_sdk.profiler.transaction_profiler import Profile -from sentry_sdk.integrations.opentelemetry.sampler import create_sampling_context from sentry_sdk.integrations.opentelemetry.utils import ( is_sentry_span, convert_from_otel_timestamp, diff --git a/sentry_sdk/opentelemetry/__init__.py b/sentry_sdk/opentelemetry/__init__.py new file mode 100644 index 0000000000..6901bcd31a --- /dev/null +++ b/sentry_sdk/opentelemetry/__init__.py @@ -0,0 +1,5 @@ +from sentry_sdk.opentelemetry.sampler import SentrySampler + +__all__ = [ + "SentrySampler", +] diff --git a/sentry_sdk/integrations/opentelemetry/sampler.py b/sentry_sdk/opentelemetry/sampler.py similarity index 100% rename from sentry_sdk/integrations/opentelemetry/sampler.py rename to sentry_sdk/opentelemetry/sampler.py From cc90218b6d74c3e87a10fa60a69db4ac9788bab7 Mon Sep 17 00:00:00 2001 From: Burak Yigit Kaya Date: Thu, 10 Apr 2025 13:20:10 +0300 Subject: [PATCH 217/244] [breaking] Enable Django `cache_spans` by default (#3994) This reverts commit 955108e5642d74d9d95535c2a1f263fcbbc62c92 (#3791) and simply enables `cache_spans` by default. 
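For applications that prefer the previous behaviour, the option can still be disabled explicitly. A minimal opt-out sketch (illustrative only, not part of the patch; only the `cache_spans=False` argument is the relevant part):

    import sentry_sdk
    from sentry_sdk.integrations.django import DjangoIntegration

    sentry_sdk.init(
        traces_sample_rate=1.0,
        integrations=[
            # cache_spans now defaults to True; pass False to restore the old default
            DjangoIntegration(cache_spans=False),
        ],
    )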
Co-authored-by: Anton Pirker Co-authored-by: Ivana Kellyer --- sentry_sdk/integrations/django/__init__.py | 2 +- sentry_sdk/integrations/django/caching.py | 22 ++++++---------------- 2 files changed, 7 insertions(+), 17 deletions(-) diff --git a/sentry_sdk/integrations/django/__init__.py b/sentry_sdk/integrations/django/__init__.py index a82cef2000..0681e734ea 100644 --- a/sentry_sdk/integrations/django/__init__.py +++ b/sentry_sdk/integrations/django/__init__.py @@ -115,7 +115,7 @@ def __init__( transaction_style="url", # type: str middleware_spans=True, # type: bool signals_spans=True, # type: bool - cache_spans=False, # type: bool + cache_spans=True, # type: bool signals_denylist=None, # type: Optional[list[signals.Signal]] http_methods_to_capture=DEFAULT_HTTP_METHODS_TO_CAPTURE, # type: tuple[str, ...] ): diff --git a/sentry_sdk/integrations/django/caching.py b/sentry_sdk/integrations/django/caching.py index 236da9f749..65bf2674e1 100644 --- a/sentry_sdk/integrations/django/caching.py +++ b/sentry_sdk/integrations/django/caching.py @@ -134,22 +134,10 @@ def _get_address_port(settings): return address, int(port) if port is not None else None -def should_enable_cache_spans(): - # type: () -> bool - from sentry_sdk.integrations.django import DjangoIntegration - - client = sentry_sdk.get_client() - integration = client.get_integration(DjangoIntegration) - from django.conf import settings - - return integration is not None and ( - (client.spotlight is not None and settings.DEBUG is True) - or integration.cache_spans is True - ) - - def patch_caching(): # type: () -> None + from sentry_sdk.integrations.django import DjangoIntegration + if not hasattr(CacheHandler, "_sentry_patched"): if DJANGO_VERSION < (3, 2): original_get_item = CacheHandler.__getitem__ @@ -159,7 +147,8 @@ def sentry_get_item(self, alias): # type: (CacheHandler, str) -> Any cache = original_get_item(self, alias) - if should_enable_cache_spans(): + integration = sentry_sdk.get_client().get_integration(DjangoIntegration) + if integration is not None and integration.cache_spans: from django.conf import settings address, port = _get_address_port( @@ -181,7 +170,8 @@ def sentry_create_connection(self, alias): # type: (CacheHandler, str) -> Any cache = original_create_connection(self, alias) - if should_enable_cache_spans(): + integration = sentry_sdk.get_client().get_integration(DjangoIntegration) + if integration is not None and integration.cache_spans: address, port = _get_address_port(self.settings[alias or "default"]) _patch_cache(cache, address, port) From f4da2c3b41d3e8d1292b0698599308de8e73b3b7 Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Thu, 10 Apr 2025 14:24:22 +0200 Subject: [PATCH 218/244] Update tox.ini --- tox.ini | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/tox.ini b/tox.ini index e924e6a3b1..3777f8f0ca 100644 --- a/tox.ini +++ b/tox.ini @@ -10,7 +10,7 @@ # The file (and all resulting CI YAMLs) then need to be regenerated via # "scripts/generate-test-files.sh". 
# -# Last generated: 2025-04-08T10:33:11.499210+00:00 +# Last generated: 2025-04-10T12:24:07.555804+00:00 [tox] requires = @@ -156,7 +156,7 @@ envlist = {py3.7}-pymongo-v3.7.2 {py3.7,py3.10,py3.11}-pymongo-v3.13.0 {py3.7,py3.9,py3.10}-pymongo-v4.0.2 - {py3.9,py3.12,py3.13}-pymongo-v4.11.3 + {py3.9,py3.12,py3.13}-pymongo-v4.12.0 {py3.7}-redis_py_cluster_legacy-v2.0.0 {py3.7,py3.8}-redis_py_cluster_legacy-v2.1.3 @@ -173,7 +173,7 @@ envlist = {py3.8,py3.12,py3.13}-launchdarkly-v9.10.0 {py3.8,py3.12,py3.13}-openfeature-v0.7.5 - {py3.9,py3.12,py3.13}-openfeature-v0.8.0 + {py3.9,py3.12,py3.13}-openfeature-v0.8.1 {py3.7,py3.12,py3.13}-statsig-v0.55.3 {py3.7,py3.12,py3.13}-statsig-v0.56.0 @@ -208,11 +208,12 @@ envlist = {py3.7,py3.9,py3.10}-grpc-v1.44.0 {py3.7,py3.10,py3.11}-grpc-v1.58.3 {py3.9,py3.12,py3.13}-grpc-v1.71.0 + {py3.9,py3.12,py3.13}-grpc-v1.72.0rc1 # ~~~ Tasks ~~~ - {py3.6,py3.7,py3.8}-celery-v4.4.7 - {py3.6,py3.7,py3.8}-celery-v5.0.5 + {py3.8}-celery-v4.4.7 + {py3.8}-celery-v5.0.5 {py3.8,py3.12,py3.13}-celery-v5.5.1 {py3.7}-dramatiq-v1.9.0 @@ -510,7 +511,7 @@ deps = pymongo-v3.7.2: pymongo==3.7.2 pymongo-v3.13.0: pymongo==3.13.0 pymongo-v4.0.2: pymongo==4.0.2 - pymongo-v4.11.3: pymongo==4.11.3 + pymongo-v4.12.0: pymongo==4.12.0 pymongo: mockupdb redis_py_cluster_legacy-v2.0.0: redis-py-cluster==2.0.0 @@ -528,7 +529,7 @@ deps = launchdarkly-v9.10.0: launchdarkly-server-sdk==9.10.0 openfeature-v0.7.5: openfeature-sdk==0.7.5 - openfeature-v0.8.0: openfeature-sdk==0.8.0 + openfeature-v0.8.1: openfeature-sdk==0.8.1 statsig-v0.55.3: statsig==0.55.3 statsig-v0.56.0: statsig==0.56.0 @@ -576,6 +577,7 @@ deps = grpc-v1.44.0: grpcio==1.44.0 grpc-v1.58.3: grpcio==1.58.3 grpc-v1.71.0: grpcio==1.71.0 + grpc-v1.72.0rc1: grpcio==1.72.0rc1 grpc: protobuf grpc: mypy-protobuf grpc: types-protobuf From b30574a306b479c5facef69a03a0332068578779 Mon Sep 17 00:00:00 2001 From: Daniel Szoke Date: Wed, 9 Apr 2025 16:58:43 +0200 Subject: [PATCH 219/244] feat(scope): Replace `transaction` with `root_span` Closes #4235 --- MIGRATION_GUIDE.md | 2 +- sentry_sdk/api.py | 2 +- sentry_sdk/client.py | 2 +- sentry_sdk/integrations/arq.py | 10 +++++----- sentry_sdk/integrations/django/asgi.py | 4 ++-- sentry_sdk/integrations/django/views.py | 4 ++-- sentry_sdk/integrations/fastapi.py | 4 ++-- sentry_sdk/integrations/huey.py | 6 +++--- sentry_sdk/integrations/quart.py | 4 ++-- sentry_sdk/integrations/starlette.py | 4 ++-- sentry_sdk/scope.py | 4 ++-- tests/test_scope.py | 15 +++++++++++++++ 12 files changed, 38 insertions(+), 23 deletions(-) diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index 027600a765..998dec6dad 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -162,7 +162,7 @@ Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of wh - `same_process_as_parent` - `span_id` - `parent_span_id`: you can supply a `parent_span` instead -- Setting `Scope.transaction` directly is no longer supported. Use `Scope.set_transaction_name()` instead. +- The `Scope.transaction` property has been removed. To obtain the root span, use `Scope.root_span`. To set the root span's name, use `Scope.set_transaction_name()`. - Passing a list or `None` for `failed_request_status_codes` in the Starlette integration is no longer supported. Pass a set of integers instead. - The `span` argument of `Scope.trace_propagation_meta` is no longer supported. - Setting `Scope.user` directly is no longer supported. Use `Scope.set_user()` instead. 
diff --git a/sentry_sdk/api.py b/sentry_sdk/api.py index ca4ad3846a..475bbf5ef5 100644 --- a/sentry_sdk/api.py +++ b/sentry_sdk/api.py @@ -299,7 +299,7 @@ def start_transaction( def set_measurement(name, value, unit=""): # type: (str, float, MeasurementUnit) -> None - transaction = get_current_scope().transaction + transaction = get_current_scope().root_span if transaction is not None: transaction.set_measurement(name, value, unit) diff --git a/sentry_sdk/client.py b/sentry_sdk/client.py index f2acb8d489..c4599cd840 100644 --- a/sentry_sdk/client.py +++ b/sentry_sdk/client.py @@ -858,7 +858,7 @@ def _capture_experimental_log(self, current_scope, log): log["attributes"]["sentry.trace.parent_span_id"] = span.span_id if log.get("trace_id") is None: - transaction = current_scope.transaction + transaction = current_scope.root_span propagation_context = isolation_scope.get_active_propagation_context() if transaction is not None: log["trace_id"] = transaction.trace_id diff --git a/sentry_sdk/integrations/arq.py b/sentry_sdk/integrations/arq.py index ee80d211f5..e9dd9d92ac 100644 --- a/sentry_sdk/integrations/arq.py +++ b/sentry_sdk/integrations/arq.py @@ -127,12 +127,12 @@ def _capture_exception(exc_info): # type: (ExcInfo) -> None scope = sentry_sdk.get_current_scope() - if scope.transaction is not None: + if scope.root_span is not None: if exc_info[0] in ARQ_CONTROL_FLOW_EXCEPTIONS: - scope.transaction.set_status(SPANSTATUS.ABORTED) + scope.root_span.set_status(SPANSTATUS.ABORTED) return - scope.transaction.set_status(SPANSTATUS.INTERNAL_ERROR) + scope.root_span.set_status(SPANSTATUS.INTERNAL_ERROR) event, hint = event_from_exception( exc_info, @@ -149,8 +149,8 @@ def event_processor(event, hint): with capture_internal_exceptions(): scope = sentry_sdk.get_current_scope() - if scope.transaction is not None: - scope.transaction.name = ctx["job_name"] + if scope.root_span is not None: + scope.root_span.name = ctx["job_name"] event["transaction"] = ctx["job_name"] tags = event.setdefault("tags", {}) diff --git a/sentry_sdk/integrations/django/asgi.py b/sentry_sdk/integrations/django/asgi.py index 0ca1c080fd..511de34855 100644 --- a/sentry_sdk/integrations/django/asgi.py +++ b/sentry_sdk/integrations/django/asgi.py @@ -176,8 +176,8 @@ def wrap_async_view(callback): async def sentry_wrapped_callback(request, *args, **kwargs): # type: (Any, *Any, **Any) -> Any current_scope = sentry_sdk.get_current_scope() - if current_scope.transaction is not None: - current_scope.transaction.update_active_thread() + if current_scope.root_span is not None: + current_scope.root_span.update_active_thread() sentry_scope = sentry_sdk.get_isolation_scope() if sentry_scope.profile is not None: diff --git a/sentry_sdk/integrations/django/views.py b/sentry_sdk/integrations/django/views.py index aa2140764c..6240ac6bbb 100644 --- a/sentry_sdk/integrations/django/views.py +++ b/sentry_sdk/integrations/django/views.py @@ -79,8 +79,8 @@ def _wrap_sync_view(callback): def sentry_wrapped_callback(request, *args, **kwargs): # type: (Any, *Any, **Any) -> Any current_scope = sentry_sdk.get_current_scope() - if current_scope.transaction is not None: - current_scope.transaction.update_active_thread() + if current_scope.root_span is not None: + current_scope.root_span.update_active_thread() sentry_scope = sentry_sdk.get_isolation_scope() # set the active thread id to the handler thread for sync views diff --git a/sentry_sdk/integrations/fastapi.py b/sentry_sdk/integrations/fastapi.py index 76c6adee0f..4a0080c6fd 100644 --- 
a/sentry_sdk/integrations/fastapi.py +++ b/sentry_sdk/integrations/fastapi.py @@ -89,8 +89,8 @@ def _sentry_get_request_handler(*args, **kwargs): def _sentry_call(*args, **kwargs): # type: (*Any, **Any) -> Any current_scope = sentry_sdk.get_current_scope() - if current_scope.transaction is not None: - current_scope.transaction.update_active_thread() + if current_scope.root_span is not None: + current_scope.root_span.update_active_thread() sentry_scope = sentry_sdk.get_isolation_scope() if sentry_scope.profile is not None: diff --git a/sentry_sdk/integrations/huey.py b/sentry_sdk/integrations/huey.py index 8e9d45a526..fcdbd4f9f9 100644 --- a/sentry_sdk/integrations/huey.py +++ b/sentry_sdk/integrations/huey.py @@ -111,10 +111,10 @@ def _capture_exception(exc_info): scope = sentry_sdk.get_current_scope() if exc_info[0] in HUEY_CONTROL_FLOW_EXCEPTIONS: - scope.transaction.set_status(SPANSTATUS.ABORTED) + scope.root_span.set_status(SPANSTATUS.ABORTED) return - scope.transaction.set_status(SPANSTATUS.INTERNAL_ERROR) + scope.root_span.set_status(SPANSTATUS.INTERNAL_ERROR) event, hint = event_from_exception( exc_info, client_options=sentry_sdk.get_client().options, @@ -136,7 +136,7 @@ def _sentry_execute(*args, **kwargs): _capture_exception(exc_info) reraise(*exc_info) else: - sentry_sdk.get_current_scope().transaction.set_status(SPANSTATUS.OK) + sentry_sdk.get_current_scope().root_span.set_status(SPANSTATUS.OK) return result diff --git a/sentry_sdk/integrations/quart.py b/sentry_sdk/integrations/quart.py index 51306bb4cd..f7f8ce7902 100644 --- a/sentry_sdk/integrations/quart.py +++ b/sentry_sdk/integrations/quart.py @@ -122,8 +122,8 @@ def decorator(old_func): def _sentry_func(*args, **kwargs): # type: (*Any, **Any) -> Any current_scope = sentry_sdk.get_current_scope() - if current_scope.transaction is not None: - current_scope.transaction.update_active_thread() + if current_scope.root_span is not None: + current_scope.root_span.update_active_thread() sentry_scope = sentry_sdk.get_isolation_scope() if sentry_scope.profile is not None: diff --git a/sentry_sdk/integrations/starlette.py b/sentry_sdk/integrations/starlette.py index e75889ea38..bdb842f596 100644 --- a/sentry_sdk/integrations/starlette.py +++ b/sentry_sdk/integrations/starlette.py @@ -477,8 +477,8 @@ def _sentry_sync_func(*args, **kwargs): return old_func(*args, **kwargs) current_scope = sentry_sdk.get_current_scope() - if current_scope.transaction is not None: - current_scope.transaction.update_active_thread() + if current_scope.root_span is not None: + current_scope.root_span.update_active_thread() sentry_scope = sentry_sdk.get_isolation_scope() if sentry_scope.profile is not None: diff --git a/sentry_sdk/scope.py b/sentry_sdk/scope.py index 4d69b2ff68..487b45b583 100644 --- a/sentry_sdk/scope.py +++ b/sentry_sdk/scope.py @@ -688,10 +688,10 @@ def fingerprint(self, value): self._fingerprint = value @property - def transaction(self): + def root_span(self): # type: () -> Any # would be type: () -> Optional[Span], see https://github.com/python/mypy/issues/3004 - """Return the transaction (root span) in the scope, if any.""" + """Return the root span in the scope, if any.""" # there is no span/transaction on the scope if self._span is None: diff --git a/tests/test_scope.py b/tests/test_scope.py index 98b9320944..c5a2d9380e 100644 --- a/tests/test_scope.py +++ b/tests/test_scope.py @@ -915,3 +915,18 @@ def test_last_event_id_cleared(sentry_init): Scope.get_isolation_scope().clear() assert Scope.last_event_id() is None, "last_event_id 
should be cleared" + + +def test_root_span(sentry_init): + sentry_init(traces_sample_rate=1.0) + + assert sentry_sdk.get_current_scope().root_span is None + + with sentry_sdk.start_span(name="test") as root_span: + assert sentry_sdk.get_current_scope().root_span == root_span + with sentry_sdk.start_span(name="child"): + assert sentry_sdk.get_current_scope().root_span == root_span + with sentry_sdk.start_span(name="grandchild"): + assert sentry_sdk.get_current_scope().root_span == root_span + + assert sentry_sdk.get_current_scope().root_span is None From ab802b5a0dc436a3821f19f77d6480945f7587e8 Mon Sep 17 00:00:00 2001 From: Daniel Szoke <7881302+szokeasaurusrex@users.noreply.github.com> Date: Thu, 10 Apr 2025 15:17:52 +0200 Subject: [PATCH 220/244] feat(scope): Replace `transaction` with `root_span` (#4263) Closes #4235 --------- Co-authored-by: Ivana Kellyer --- MIGRATION_GUIDE.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index 998dec6dad..87a81313eb 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -162,7 +162,7 @@ Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of wh - `same_process_as_parent` - `span_id` - `parent_span_id`: you can supply a `parent_span` instead -- The `Scope.transaction` property has been removed. To obtain the root span, use `Scope.root_span`. To set the root span's name, use `Scope.set_transaction_name()`. +- The `Scope.transaction` property has been removed. To obtain the root span (previously transaction), use `Scope.root_span`. To set the root span's (transaction's) name, use `Scope.set_transaction_name()`. - Passing a list or `None` for `failed_request_status_codes` in the Starlette integration is no longer supported. Pass a set of integers instead. - The `span` argument of `Scope.trace_propagation_meta` is no longer supported. - Setting `Scope.user` directly is no longer supported. Use `Scope.set_user()` instead. From 6af99bd153dc29e27379d25b89ec3a20fb9eb845 Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Thu, 10 Apr 2025 16:10:35 +0200 Subject: [PATCH 221/244] ref: Move span processor out of `integrations/opentelemetry/` (#4259) Moving stuff out of `integrations/opentelemetry/` step by step since there is no OpenTelemetry integration anymore -- it's part of the core SDK. 
This moves `sentry_sdk/integrations/opentelemetry/span_processor.py` -> `sentry_sdk/opentelemetry/span_processor.py` Ref https://github.com/getsentry/sentry-python/issues/3853 --------- Co-authored-by: Daniel Szoke <7881302+szokeasaurusrex@users.noreply.github.com> --- sentry_sdk/integrations/opentelemetry/__init__.py | 2 -- sentry_sdk/integrations/opentelemetry/integration.py | 5 +---- sentry_sdk/opentelemetry/__init__.py | 2 ++ .../{integrations => }/opentelemetry/span_processor.py | 0 4 files changed, 3 insertions(+), 6 deletions(-) rename sentry_sdk/{integrations => }/opentelemetry/span_processor.py (100%) diff --git a/sentry_sdk/integrations/opentelemetry/__init__.py b/sentry_sdk/integrations/opentelemetry/__init__.py index 3c4c1a683d..5dbd23e3a7 100644 --- a/sentry_sdk/integrations/opentelemetry/__init__.py +++ b/sentry_sdk/integrations/opentelemetry/__init__.py @@ -1,7 +1,5 @@ -from sentry_sdk.integrations.opentelemetry.span_processor import SentrySpanProcessor from sentry_sdk.integrations.opentelemetry.propagator import SentryPropagator __all__ = [ "SentryPropagator", - "SentrySpanProcessor", ] diff --git a/sentry_sdk/integrations/opentelemetry/integration.py b/sentry_sdk/integrations/opentelemetry/integration.py index 581918e81c..8b9130ca33 100644 --- a/sentry_sdk/integrations/opentelemetry/integration.py +++ b/sentry_sdk/integrations/opentelemetry/integration.py @@ -6,10 +6,7 @@ from sentry_sdk.integrations import DidNotEnable, Integration from sentry_sdk.integrations.opentelemetry.propagator import SentryPropagator -from sentry_sdk.integrations.opentelemetry.span_processor import ( - SentrySpanProcessor, -) -from sentry_sdk.opentelemetry.sampler import SentrySampler +from sentry_sdk.opentelemetry import SentrySampler, SentrySpanProcessor from sentry_sdk.utils import logger try: diff --git a/sentry_sdk/opentelemetry/__init__.py b/sentry_sdk/opentelemetry/__init__.py index 6901bcd31a..b8d1b0a44f 100644 --- a/sentry_sdk/opentelemetry/__init__.py +++ b/sentry_sdk/opentelemetry/__init__.py @@ -1,5 +1,7 @@ from sentry_sdk.opentelemetry.sampler import SentrySampler +from sentry_sdk.opentelemetry.span_processor import SentrySpanProcessor __all__ = [ "SentrySampler", + "SentrySpanProcessor", ] diff --git a/sentry_sdk/integrations/opentelemetry/span_processor.py b/sentry_sdk/opentelemetry/span_processor.py similarity index 100% rename from sentry_sdk/integrations/opentelemetry/span_processor.py rename to sentry_sdk/opentelemetry/span_processor.py From 6c521d134538cb8f4dd5449fdca646519e4cab2a Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Thu, 10 Apr 2025 16:20:16 +0200 Subject: [PATCH 222/244] ref: Move propagator out of `integrations/opentelemetry/` (#4260) Moving stuff out of `integrations/opentelemetry/` step by step since there is no OpenTelemetry integration anymore -- it's part of the core SDK. 
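As with the earlier sampler and span processor moves, the user-visible effect is an import-path change. A hypothetical downstream import after this patch, matching the exports added to `sentry_sdk/opentelemetry/__init__.py` (assuming the code previously imported these classes from `sentry_sdk.integrations.opentelemetry`):

    from sentry_sdk.opentelemetry import (
        SentryPropagator,
        SentrySampler,
        SentrySpanProcessor,
    )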
This moves `sentry_sdk/integrations/opentelemetry/propagator.py` -> `sentry_sdk/opentelemetry/propagator.py` Ref https://github.com/getsentry/sentry-python/issues/3853 --------- Co-authored-by: Daniel Szoke <7881302+szokeasaurusrex@users.noreply.github.com> --- sentry_sdk/integrations/opentelemetry/__init__.py | 5 ----- sentry_sdk/integrations/opentelemetry/integration.py | 7 +++++-- sentry_sdk/opentelemetry/__init__.py | 2 ++ sentry_sdk/{integrations => }/opentelemetry/propagator.py | 0 setup.py | 2 +- tests/integrations/opentelemetry/test_entry_points.py | 2 +- tests/integrations/opentelemetry/test_propagator.py | 2 +- 7 files changed, 10 insertions(+), 10 deletions(-) rename sentry_sdk/{integrations => }/opentelemetry/propagator.py (100%) diff --git a/sentry_sdk/integrations/opentelemetry/__init__.py b/sentry_sdk/integrations/opentelemetry/__init__.py index 5dbd23e3a7..e69de29bb2 100644 --- a/sentry_sdk/integrations/opentelemetry/__init__.py +++ b/sentry_sdk/integrations/opentelemetry/__init__.py @@ -1,5 +0,0 @@ -from sentry_sdk.integrations.opentelemetry.propagator import SentryPropagator - -__all__ = [ - "SentryPropagator", -] diff --git a/sentry_sdk/integrations/opentelemetry/integration.py b/sentry_sdk/integrations/opentelemetry/integration.py index 8b9130ca33..ef5bd34f96 100644 --- a/sentry_sdk/integrations/opentelemetry/integration.py +++ b/sentry_sdk/integrations/opentelemetry/integration.py @@ -5,8 +5,11 @@ """ from sentry_sdk.integrations import DidNotEnable, Integration -from sentry_sdk.integrations.opentelemetry.propagator import SentryPropagator -from sentry_sdk.opentelemetry import SentrySampler, SentrySpanProcessor +from sentry_sdk.opentelemetry import ( + SentryPropagator, + SentrySampler, + SentrySpanProcessor, +) from sentry_sdk.utils import logger try: diff --git a/sentry_sdk/opentelemetry/__init__.py b/sentry_sdk/opentelemetry/__init__.py index b8d1b0a44f..2d057016c1 100644 --- a/sentry_sdk/opentelemetry/__init__.py +++ b/sentry_sdk/opentelemetry/__init__.py @@ -1,7 +1,9 @@ +from sentry_sdk.opentelemetry.propagator import SentryPropagator from sentry_sdk.opentelemetry.sampler import SentrySampler from sentry_sdk.opentelemetry.span_processor import SentrySpanProcessor __all__ = [ + "SentryPropagator", "SentrySampler", "SentrySpanProcessor", ] diff --git a/sentry_sdk/integrations/opentelemetry/propagator.py b/sentry_sdk/opentelemetry/propagator.py similarity index 100% rename from sentry_sdk/integrations/opentelemetry/propagator.py rename to sentry_sdk/opentelemetry/propagator.py diff --git a/setup.py b/setup.py index 5f5242d838..04bcbe2122 100644 --- a/setup.py +++ b/setup.py @@ -86,7 +86,7 @@ def get_file_text(file_name): }, entry_points={ "opentelemetry_propagator": [ - "sentry=sentry_sdk.integrations.opentelemetry:SentryPropagator" + "sentry=sentry_sdk.opentelemetry:SentryPropagator" ] }, classifiers=[ diff --git a/tests/integrations/opentelemetry/test_entry_points.py b/tests/integrations/opentelemetry/test_entry_points.py index cd78209432..efadb67a06 100644 --- a/tests/integrations/opentelemetry/test_entry_points.py +++ b/tests/integrations/opentelemetry/test_entry_points.py @@ -3,7 +3,7 @@ from unittest.mock import patch from opentelemetry import propagate -from sentry_sdk.integrations.opentelemetry import SentryPropagator +from sentry_sdk.opentelemetry import SentryPropagator def test_propagator_loaded_if_mentioned_in_environment_variable(): diff --git a/tests/integrations/opentelemetry/test_propagator.py b/tests/integrations/opentelemetry/test_propagator.py 
index b18e3bc400..f57679cc3e 100644 --- a/tests/integrations/opentelemetry/test_propagator.py +++ b/tests/integrations/opentelemetry/test_propagator.py @@ -10,7 +10,7 @@ SENTRY_BAGGAGE_KEY, SENTRY_TRACE_KEY, ) -from sentry_sdk.integrations.opentelemetry.propagator import SentryPropagator +from sentry_sdk.opentelemetry import SentryPropagator from tests.conftest import SortedBaggage From dfad14fda34c85450ea891bf005282ba39ae58e7 Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Thu, 10 Apr 2025 16:27:56 +0200 Subject: [PATCH 223/244] ref: Move contextvars_context out of integrations (#4271) Moving stuff out of `integrations/opentelemetry/` step by step since there is no OpenTelemetry integration anymore -- it's part of the core SDK. This moves `sentry_sdk/integrations/opentelemetry/contextvars_context.py` -> `sentry_sdk/opentelemetry/contextvars_context.py` Ref https://github.com/getsentry/sentry-python/issues/3853 --- sentry_sdk/integrations/opentelemetry/scope.py | 4 ++-- .../{integrations => }/opentelemetry/contextvars_context.py | 0 setup.py | 4 +--- 3 files changed, 3 insertions(+), 5 deletions(-) rename sentry_sdk/{integrations => }/opentelemetry/contextvars_context.py (100%) diff --git a/sentry_sdk/integrations/opentelemetry/scope.py b/sentry_sdk/integrations/opentelemetry/scope.py index 53b9fd247c..491404529a 100644 --- a/sentry_sdk/integrations/opentelemetry/scope.py +++ b/sentry_sdk/integrations/opentelemetry/scope.py @@ -24,10 +24,10 @@ SENTRY_USE_ISOLATION_SCOPE_KEY, TRACESTATE_SAMPLED_KEY, ) -from sentry_sdk.integrations.opentelemetry.contextvars_context import ( +from sentry_sdk.integrations.opentelemetry.utils import trace_state_from_baggage +from sentry_sdk.opentelemetry.contextvars_context import ( SentryContextVarsRuntimeContext, ) -from sentry_sdk.integrations.opentelemetry.utils import trace_state_from_baggage from sentry_sdk.scope import Scope, ScopeType from sentry_sdk.tracing import Span from sentry_sdk._types import TYPE_CHECKING diff --git a/sentry_sdk/integrations/opentelemetry/contextvars_context.py b/sentry_sdk/opentelemetry/contextvars_context.py similarity index 100% rename from sentry_sdk/integrations/opentelemetry/contextvars_context.py rename to sentry_sdk/opentelemetry/contextvars_context.py diff --git a/setup.py b/setup.py index 04bcbe2122..ecbb3e2f54 100644 --- a/setup.py +++ b/setup.py @@ -85,9 +85,7 @@ def get_file_text(file_name): "unleash": ["UnleashClient>=6.0.1"], }, entry_points={ - "opentelemetry_propagator": [ - "sentry=sentry_sdk.opentelemetry:SentryPropagator" - ] + "opentelemetry_propagator": ["sentry=sentry_sdk.opentelemetry:SentryPropagator"] }, classifiers=[ "Development Status :: 5 - Production/Stable", From e4f8f1fc3a2684fcf2130ca8707d1df6ac520c2c Mon Sep 17 00:00:00 2001 From: Daniel Szoke <7881302+szokeasaurusrex@users.noreply.github.com> Date: Mon, 14 Apr 2025 10:16:47 +0200 Subject: [PATCH 224/244] test(tracing): Simplify static/classmethod tracing tests (#4279) Port of #4278 for POTel. --- Thank you for contributing to `sentry-python`! Please add tests to validate your changes, and lint your code using `tox -e linters`. Running the test suite on your PR might require maintainer approval. 
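The refactored tests below rely on the `functions_to_trace` option instead of patching span creation. For reference, a minimal sketch of that option outside the test suite (the qualified name is a made-up example, not from the patch):

    import sentry_sdk

    sentry_sdk.init(
        traces_sample_rate=1.0,
        functions_to_trace=[
            # fully qualified name of a callable to trace automatically
            {"qualified_name": "myapp.tasks.process_order"},
        ],
    )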
--- tests/test_basics.py | 119 +++++++++++++++++++++++++++++++------------ 1 file changed, 86 insertions(+), 33 deletions(-) diff --git a/tests/test_basics.py b/tests/test_basics.py index e4f6f2636b..df6962c8a9 100644 --- a/tests/test_basics.py +++ b/tests/test_basics.py @@ -7,7 +7,6 @@ from datetime import datetime, timedelta, timezone import pytest -from tests.conftest import patch_start_tracing_child import sentry_sdk import sentry_sdk.scope @@ -769,46 +768,100 @@ def class_(cls, arg): return cls, arg -def test_staticmethod_tracing(sentry_init): - test_staticmethod_name = "tests.test_basics.TracingTestClass.static" +# We need to fork here because the test modifies tests.test_basics.TracingTestClass +@pytest.mark.forked +def test_staticmethod_class_tracing(sentry_init, capture_events): + sentry_init( + debug=True, + traces_sample_rate=1.0, + functions_to_trace=[ + {"qualified_name": "tests.test_basics.TracingTestClass.static"} + ], + ) - assert ( - ".".join( - [ - TracingTestClass.static.__module__, - TracingTestClass.static.__qualname__, - ] - ) - == test_staticmethod_name - ), "The test static method was moved or renamed. Please update the name accordingly" + events = capture_events() - sentry_init(functions_to_trace=[{"qualified_name": test_staticmethod_name}]) + with sentry_sdk.start_span(name="test"): + assert TracingTestClass.static(1) == 1 - for instance_or_class in (TracingTestClass, TracingTestClass()): - with patch_start_tracing_child() as fake_start_child: - assert instance_or_class.static(1) == 1 - assert fake_start_child.call_count == 1 + (event,) = events + assert event["type"] == "transaction" + assert event["transaction"] == "test" + (span,) = event["spans"] + assert span["description"] == "tests.test_basics.TracingTestClass.static" -def test_classmethod_tracing(sentry_init): - test_classmethod_name = "tests.test_basics.TracingTestClass.class_" - assert ( - ".".join( - [ - TracingTestClass.class_.__module__, - TracingTestClass.class_.__qualname__, - ] - ) - == test_classmethod_name - ), "The test class method was moved or renamed. 
Please update the name accordingly" +# We need to fork here because the test modifies tests.test_basics.TracingTestClass +@pytest.mark.forked +def test_staticmethod_instance_tracing(sentry_init, capture_events): + sentry_init( + debug=True, + traces_sample_rate=1.0, + functions_to_trace=[ + {"qualified_name": "tests.test_basics.TracingTestClass.static"} + ], + ) + + events = capture_events() + + with sentry_sdk.start_span(name="test"): + assert TracingTestClass().static(1) == 1 + + (event,) = events + assert event["type"] == "transaction" + assert event["transaction"] == "test" - sentry_init(functions_to_trace=[{"qualified_name": test_classmethod_name}]) + (span,) = event["spans"] + assert span["description"] == "tests.test_basics.TracingTestClass.static" + + +# We need to fork here because the test modifies tests.test_basics.TracingTestClass +@pytest.mark.forked +def test_classmethod_class_tracing(sentry_init, capture_events): + sentry_init( + debug=True, + traces_sample_rate=1.0, + functions_to_trace=[ + {"qualified_name": "tests.test_basics.TracingTestClass.class_"} + ], + ) + + events = capture_events() + + with sentry_sdk.start_span(name="test"): + assert TracingTestClass.class_(1) == (TracingTestClass, 1) + + (event,) = events + assert event["type"] == "transaction" + assert event["transaction"] == "test" + + (span,) = event["spans"] + assert span["description"] == "tests.test_basics.TracingTestClass.class_" + + +# We need to fork here because the test modifies tests.test_basics.TracingTestClass +@pytest.mark.forked +def test_classmethod_instance_tracing(sentry_init, capture_events): + sentry_init( + debug=True, + traces_sample_rate=1.0, + functions_to_trace=[ + {"qualified_name": "tests.test_basics.TracingTestClass.class_"} + ], + ) + + events = capture_events() + + with sentry_sdk.start_span(name="test"): + assert TracingTestClass().class_(1) == (TracingTestClass, 1) + + (event,) = events + assert event["type"] == "transaction" + assert event["transaction"] == "test" - for instance_or_class in (TracingTestClass, TracingTestClass()): - with patch_start_tracing_child() as fake_start_child: - assert instance_or_class.class_(1) == (TracingTestClass, 1) - assert fake_start_child.call_count == 1 + (span,) = event["spans"] + assert span["description"] == "tests.test_basics.TracingTestClass.class_" def test_last_event_id(sentry_init): From 759ded932cf1cb68a8016dfddb404d8e064df395 Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Mon, 14 Apr 2025 10:51:37 +0200 Subject: [PATCH 225/244] ref: Move consts, utils from `integrations/opentelemetry/` (#4272) Moving stuff out of `integrations/opentelemetry/` step by step since there is no OpenTelemetry integration anymore -- it's part of the core SDK. This moves `sentry_sdk/integrations/opentelemetry/{utils,consts}.py` -> `sentry_sdk/opentelemetry/{utils,consts}.py`. There's quite some stuff moving around in this PR in order to remove circular dependencies that have to do with `tracing.py`: - `get_span_status_from_http_code` from `tracing.py` is now in `tracing_utils.py` - various constants from `tracing.py` are now in `consts.py` Additionally, Sphinx was unhappy, so tweaked the way we type some things in `api.py` and `_init_implementation.py`. I'll possibly follow this up with creating a nice structure for the tracing files, maybe a common `tracing` directory with `tracing.py`, `tracing_utils.py` -> `utils.py`, and dedicated `consts.py`. 
Ref https://github.com/getsentry/sentry-python/issues/3853 --- sentry_sdk/_init_implementation.py | 5 +- sentry_sdk/api.py | 38 ++-- sentry_sdk/consts.py | 40 ++++ sentry_sdk/integrations/aiohttp.py | 14 +- sentry_sdk/integrations/asgi.py | 6 +- sentry_sdk/integrations/bottle.py | 2 +- sentry_sdk/integrations/celery/__init__.py | 4 +- sentry_sdk/integrations/django/__init__.py | 3 +- sentry_sdk/integrations/falcon.py | 2 +- sentry_sdk/integrations/fastapi.py | 2 +- sentry_sdk/integrations/flask.py | 2 +- sentry_sdk/integrations/httpx.py | 3 +- sentry_sdk/integrations/huey.py | 9 +- sentry_sdk/integrations/litestar.py | 3 +- .../integrations/opentelemetry/scope.py | 4 +- sentry_sdk/integrations/pyramid.py | 2 +- sentry_sdk/integrations/quart.py | 2 +- sentry_sdk/integrations/starlette.py | 6 +- sentry_sdk/integrations/starlite.py | 3 +- .../opentelemetry/consts.py | 2 - .../opentelemetry/contextvars_context.py | 2 +- sentry_sdk/opentelemetry/propagator.py | 10 +- sentry_sdk/opentelemetry/sampler.py | 12 +- sentry_sdk/opentelemetry/span_processor.py | 23 ++- .../{integrations => }/opentelemetry/utils.py | 11 +- sentry_sdk/scope.py | 9 +- sentry_sdk/tracing.py | 171 +++--------------- sentry_sdk/tracing_utils.py | 50 ++++- .../opentelemetry/test_propagator.py | 2 +- .../integrations/opentelemetry/test_utils.py | 2 +- 30 files changed, 189 insertions(+), 255 deletions(-) rename sentry_sdk/{integrations => }/opentelemetry/consts.py (91%) rename sentry_sdk/{integrations => }/opentelemetry/utils.py (98%) diff --git a/sentry_sdk/_init_implementation.py b/sentry_sdk/_init_implementation.py index 4ad110ab56..51bcc9029c 100644 --- a/sentry_sdk/_init_implementation.py +++ b/sentry_sdk/_init_implementation.py @@ -1,13 +1,12 @@ from typing import TYPE_CHECKING import sentry_sdk +from sentry_sdk.consts import ClientConstructor from sentry_sdk.integrations.opentelemetry.scope import setup_scope_context_management if TYPE_CHECKING: from typing import Any, Optional - import sentry_sdk.consts - def _check_python_deprecations(): # type: () -> None @@ -36,7 +35,7 @@ def _init(*args, **kwargs): # Use `ClientConstructor` to define the argument types of `init` and # `ContextManager[Any]` to tell static analyzers about the return type. 
- class init(sentry_sdk.consts.ClientConstructor): # noqa: N801 + class init(ClientConstructor): # noqa: N801 pass else: diff --git a/sentry_sdk/api.py b/sentry_sdk/api.py index 475bbf5ef5..0555244a7c 100644 --- a/sentry_sdk/api.py +++ b/sentry_sdk/api.py @@ -30,17 +30,7 @@ from typing import Union from typing import Generator - from sentry_sdk.client import BaseClient - from sentry_sdk._types import ( - Event, - Hint, - Breadcrumb, - BreadcrumbHint, - ExcInfo, - MeasurementUnit, - LogLevelStr, - ) - from sentry_sdk.tracing import Span + import sentry_sdk T = TypeVar("T") F = TypeVar("F", bound=Callable[..., Any]) @@ -102,7 +92,7 @@ def clientmethod(f): @scopemethod def get_client(): - # type: () -> BaseClient + # type: () -> sentry_sdk.client.BaseClient return Scope.get_client() @@ -150,8 +140,8 @@ def last_event_id(): @scopemethod def capture_event( - event, # type: Event - hint=None, # type: Optional[Hint] + event, # type: sentry_sdk._types.Event + hint=None, # type: Optional[sentry_sdk._types.Hint] scope=None, # type: Optional[Any] **scope_kwargs, # type: Any ): @@ -162,7 +152,7 @@ def capture_event( @scopemethod def capture_message( message, # type: str - level=None, # type: Optional[LogLevelStr] + level=None, # type: Optional[sentry_sdk._types.LogLevelStr] scope=None, # type: Optional[Any] **scope_kwargs, # type: Any ): @@ -174,7 +164,7 @@ def capture_message( @scopemethod def capture_exception( - error=None, # type: Optional[Union[BaseException, ExcInfo]] + error=None, # type: Optional[Union[BaseException, sentry_sdk._types.ExcInfo]] scope=None, # type: Optional[Any] **scope_kwargs, # type: Any ): @@ -184,8 +174,8 @@ def capture_exception( @scopemethod def add_breadcrumb( - crumb=None, # type: Optional[Breadcrumb] - hint=None, # type: Optional[BreadcrumbHint] + crumb=None, # type: Optional[sentry_sdk._types.Breadcrumb] + hint=None, # type: Optional[sentry_sdk._types.BreadcrumbHint] **kwargs, # type: Any ): # type: (...) -> None @@ -224,7 +214,7 @@ def set_user(value): @scopemethod def set_level(value): - # type: (LogLevelStr) -> None + # type: (sentry_sdk._types.LogLevelStr) -> None return get_isolation_scope().set_level(value) @@ -238,7 +228,7 @@ def flush( def start_span(**kwargs): - # type: (Any) -> Span + # type: (Any) -> sentry_sdk.tracing.Span """ Start and return a span. @@ -255,10 +245,10 @@ def start_span(**kwargs): def start_transaction( - transaction=None, # type: Optional[Span] + transaction=None, # type: Optional[sentry_sdk.tracing.Span] **kwargs, # type: Any ): - # type: (...) -> Span + # type: (...) -> sentry_sdk.tracing.Span """ .. deprecated:: 3.0.0 This function is deprecated and will be removed in a future release. 
@@ -298,14 +288,14 @@ def start_transaction( def set_measurement(name, value, unit=""): - # type: (str, float, MeasurementUnit) -> None + # type: (str, float, sentry_sdk._types.MeasurementUnit) -> None transaction = get_current_scope().root_span if transaction is not None: transaction.set_measurement(name, value, unit) def get_current_span(scope=None): - # type: (Optional[Scope]) -> Optional[Span] + # type: (Optional[Scope]) -> Optional[sentry_sdk.tracing.Span] """ Returns the currently active span if there is one running, otherwise `None` """ diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index 2c164fba3a..5fbd162299 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -478,6 +478,46 @@ class OP: SOCKET_DNS = "socket.dns" +BAGGAGE_HEADER_NAME = "baggage" +SENTRY_TRACE_HEADER_NAME = "sentry-trace" + +DEFAULT_SPAN_ORIGIN = "manual" +DEFAULT_SPAN_NAME = "" + + +# Transaction source +# see https://develop.sentry.dev/sdk/event-payloads/transaction/#transaction-annotations +class TransactionSource(str, Enum): + COMPONENT = "component" + CUSTOM = "custom" + ROUTE = "route" + TASK = "task" + URL = "url" + VIEW = "view" + + def __str__(self): + # type: () -> str + return self.value + + +# These are typically high cardinality and the server hates them +LOW_QUALITY_TRANSACTION_SOURCES = [ + TransactionSource.URL, +] + +SOURCE_FOR_STYLE = { + "endpoint": TransactionSource.COMPONENT, + "function_name": TransactionSource.COMPONENT, + "handler_name": TransactionSource.COMPONENT, + "method_and_path_pattern": TransactionSource.ROUTE, + "path": TransactionSource.URL, + "route_name": TransactionSource.COMPONENT, + "route_pattern": TransactionSource.ROUTE, + "uri_template": TransactionSource.ROUTE, + "url": TransactionSource.ROUTE, +} + + # This type exists to trick mypy and PyCharm into thinking `init` and `Client` # take these arguments (even though they take opaque **kwargs) class ClientConstructor: diff --git a/sentry_sdk/integrations/aiohttp.py b/sentry_sdk/integrations/aiohttp.py index 8115063c9c..bcdd964b8d 100644 --- a/sentry_sdk/integrations/aiohttp.py +++ b/sentry_sdk/integrations/aiohttp.py @@ -3,7 +3,14 @@ from functools import wraps import sentry_sdk -from sentry_sdk.consts import OP, SPANSTATUS, SPANDATA +from sentry_sdk.consts import ( + OP, + SPANSTATUS, + SPANDATA, + BAGGAGE_HEADER_NAME, + SOURCE_FOR_STYLE, + TransactionSource, +) from sentry_sdk.integrations import ( _DEFAULT_FAILED_REQUEST_STATUS_CODES, _check_minimum_version, @@ -17,11 +24,6 @@ _request_headers_to_span_attributes, request_body_within_bounds, ) -from sentry_sdk.tracing import ( - BAGGAGE_HEADER_NAME, - SOURCE_FOR_STYLE, - TransactionSource, -) from sentry_sdk.tracing_utils import should_propagate_trace from sentry_sdk.utils import ( capture_internal_exceptions, diff --git a/sentry_sdk/integrations/asgi.py b/sentry_sdk/integrations/asgi.py index 5911b2a84c..c65ce5bed3 100644 --- a/sentry_sdk/integrations/asgi.py +++ b/sentry_sdk/integrations/asgi.py @@ -10,7 +10,7 @@ from functools import partial import sentry_sdk -from sentry_sdk.consts import OP +from sentry_sdk.consts import OP, SOURCE_FOR_STYLE, TransactionSource from sentry_sdk.integrations._asgi_common import ( _get_headers, @@ -23,10 +23,6 @@ _request_headers_to_span_attributes, ) from sentry_sdk.sessions import track_session -from sentry_sdk.tracing import ( - SOURCE_FOR_STYLE, - TransactionSource, -) from sentry_sdk.utils import ( ContextVar, capture_internal_exceptions, diff --git a/sentry_sdk/integrations/bottle.py 
b/sentry_sdk/integrations/bottle.py index 8a9fc41208..1fefcf0319 100644 --- a/sentry_sdk/integrations/bottle.py +++ b/sentry_sdk/integrations/bottle.py @@ -1,7 +1,7 @@ import functools import sentry_sdk -from sentry_sdk.tracing import SOURCE_FOR_STYLE +from sentry_sdk.consts import SOURCE_FOR_STYLE from sentry_sdk.utils import ( capture_internal_exceptions, ensure_integration_enabled, diff --git a/sentry_sdk/integrations/celery/__init__.py b/sentry_sdk/integrations/celery/__init__.py index bbaf3aec77..95a09e6029 100644 --- a/sentry_sdk/integrations/celery/__init__.py +++ b/sentry_sdk/integrations/celery/__init__.py @@ -4,7 +4,7 @@ import sentry_sdk from sentry_sdk import isolation_scope -from sentry_sdk.consts import OP, SPANSTATUS, SPANDATA +from sentry_sdk.consts import OP, SPANSTATUS, SPANDATA, BAGGAGE_HEADER_NAME from sentry_sdk.integrations import _check_minimum_version, Integration, DidNotEnable from sentry_sdk.integrations.celery.beat import ( _patch_beat_apply_entry, @@ -13,7 +13,7 @@ ) from sentry_sdk.integrations.celery.utils import _now_seconds_since_epoch from sentry_sdk.integrations.logging import ignore_logger -from sentry_sdk.tracing import BAGGAGE_HEADER_NAME, TransactionSource +from sentry_sdk.tracing import TransactionSource from sentry_sdk.tracing_utils import Baggage from sentry_sdk.utils import ( capture_internal_exceptions, diff --git a/sentry_sdk/integrations/django/__init__.py b/sentry_sdk/integrations/django/__init__.py index 0681e734ea..e62ba63f70 100644 --- a/sentry_sdk/integrations/django/__init__.py +++ b/sentry_sdk/integrations/django/__init__.py @@ -6,10 +6,9 @@ from importlib import import_module import sentry_sdk -from sentry_sdk.consts import OP, SPANDATA +from sentry_sdk.consts import OP, SPANDATA, SOURCE_FOR_STYLE, TransactionSource from sentry_sdk.scope import add_global_event_processor, should_send_default_pii from sentry_sdk.serializer import add_global_repr_processor -from sentry_sdk.tracing import SOURCE_FOR_STYLE, TransactionSource from sentry_sdk.tracing_utils import add_query_source, record_sql_queries from sentry_sdk.utils import ( AnnotatedValue, diff --git a/sentry_sdk/integrations/falcon.py b/sentry_sdk/integrations/falcon.py index 0b5c9c4fe7..9038c01a3f 100644 --- a/sentry_sdk/integrations/falcon.py +++ b/sentry_sdk/integrations/falcon.py @@ -1,8 +1,8 @@ import sentry_sdk +from sentry_sdk.consts import SOURCE_FOR_STYLE from sentry_sdk.integrations import _check_minimum_version, Integration, DidNotEnable from sentry_sdk.integrations._wsgi_common import RequestExtractor from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware -from sentry_sdk.tracing import SOURCE_FOR_STYLE from sentry_sdk.utils import ( capture_internal_exceptions, ensure_integration_enabled, diff --git a/sentry_sdk/integrations/fastapi.py b/sentry_sdk/integrations/fastapi.py index 4a0080c6fd..0e087e3975 100644 --- a/sentry_sdk/integrations/fastapi.py +++ b/sentry_sdk/integrations/fastapi.py @@ -3,9 +3,9 @@ from functools import wraps import sentry_sdk +from sentry_sdk.consts import SOURCE_FOR_STYLE, TransactionSource from sentry_sdk.integrations import DidNotEnable from sentry_sdk.scope import should_send_default_pii -from sentry_sdk.tracing import SOURCE_FOR_STYLE, TransactionSource from sentry_sdk.utils import ( transaction_from_function, logger, diff --git a/sentry_sdk/integrations/flask.py b/sentry_sdk/integrations/flask.py index f45ec6db20..9223eacd24 100644 --- a/sentry_sdk/integrations/flask.py +++ b/sentry_sdk/integrations/flask.py @@ -1,4 +1,5 @@ import 
sentry_sdk +from sentry_sdk.consts import SOURCE_FOR_STYLE from sentry_sdk.integrations import _check_minimum_version, DidNotEnable, Integration from sentry_sdk.integrations._wsgi_common import ( DEFAULT_HTTP_METHODS_TO_CAPTURE, @@ -6,7 +7,6 @@ ) from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware from sentry_sdk.scope import should_send_default_pii -from sentry_sdk.tracing import SOURCE_FOR_STYLE from sentry_sdk.utils import ( capture_internal_exceptions, ensure_integration_enabled, diff --git a/sentry_sdk/integrations/httpx.py b/sentry_sdk/integrations/httpx.py index a7c391851c..988778acd0 100644 --- a/sentry_sdk/integrations/httpx.py +++ b/sentry_sdk/integrations/httpx.py @@ -1,7 +1,6 @@ import sentry_sdk -from sentry_sdk.consts import OP, SPANDATA +from sentry_sdk.consts import OP, SPANDATA, BAGGAGE_HEADER_NAME from sentry_sdk.integrations import Integration, DidNotEnable -from sentry_sdk.tracing import BAGGAGE_HEADER_NAME from sentry_sdk.tracing_utils import Baggage, should_propagate_trace from sentry_sdk.utils import ( SENSITIVE_DATA_SUBSTITUTE, diff --git a/sentry_sdk/integrations/huey.py b/sentry_sdk/integrations/huey.py index fcdbd4f9f9..720d38f8e2 100644 --- a/sentry_sdk/integrations/huey.py +++ b/sentry_sdk/integrations/huey.py @@ -3,14 +3,15 @@ import sentry_sdk from sentry_sdk.api import get_baggage, get_traceparent -from sentry_sdk.consts import OP, SPANSTATUS -from sentry_sdk.integrations import DidNotEnable, Integration -from sentry_sdk.scope import should_send_default_pii -from sentry_sdk.tracing import ( +from sentry_sdk.consts import ( + OP, + SPANSTATUS, BAGGAGE_HEADER_NAME, SENTRY_TRACE_HEADER_NAME, TransactionSource, ) +from sentry_sdk.integrations import DidNotEnable, Integration +from sentry_sdk.scope import should_send_default_pii from sentry_sdk.utils import ( capture_internal_exceptions, ensure_integration_enabled, diff --git a/sentry_sdk/integrations/litestar.py b/sentry_sdk/integrations/litestar.py index 484fea46c8..66b7040f6d 100644 --- a/sentry_sdk/integrations/litestar.py +++ b/sentry_sdk/integrations/litestar.py @@ -1,6 +1,6 @@ from collections.abc import Set import sentry_sdk -from sentry_sdk.consts import OP +from sentry_sdk.consts import OP, TransactionSource, SOURCE_FOR_STYLE from sentry_sdk.integrations import ( _DEFAULT_FAILED_REQUEST_STATUS_CODES, DidNotEnable, @@ -9,7 +9,6 @@ from sentry_sdk.integrations.asgi import SentryAsgiMiddleware from sentry_sdk.integrations.logging import ignore_logger from sentry_sdk.scope import should_send_default_pii -from sentry_sdk.tracing import TransactionSource, SOURCE_FOR_STYLE from sentry_sdk.utils import ( ensure_integration_enabled, event_from_exception, diff --git a/sentry_sdk/integrations/opentelemetry/scope.py b/sentry_sdk/integrations/opentelemetry/scope.py index 491404529a..f12adc29f0 100644 --- a/sentry_sdk/integrations/opentelemetry/scope.py +++ b/sentry_sdk/integrations/opentelemetry/scope.py @@ -17,17 +17,17 @@ use_span, ) -from sentry_sdk.integrations.opentelemetry.consts import ( +from sentry_sdk.opentelemetry.consts import ( SENTRY_SCOPES_KEY, SENTRY_FORK_ISOLATION_SCOPE_KEY, SENTRY_USE_CURRENT_SCOPE_KEY, SENTRY_USE_ISOLATION_SCOPE_KEY, TRACESTATE_SAMPLED_KEY, ) -from sentry_sdk.integrations.opentelemetry.utils import trace_state_from_baggage from sentry_sdk.opentelemetry.contextvars_context import ( SentryContextVarsRuntimeContext, ) +from sentry_sdk.opentelemetry.utils import trace_state_from_baggage from sentry_sdk.scope import Scope, ScopeType from sentry_sdk.tracing import Span from 
sentry_sdk._types import TYPE_CHECKING diff --git a/sentry_sdk/integrations/pyramid.py b/sentry_sdk/integrations/pyramid.py index d1475ada65..a4d30e38a4 100644 --- a/sentry_sdk/integrations/pyramid.py +++ b/sentry_sdk/integrations/pyramid.py @@ -4,11 +4,11 @@ import weakref import sentry_sdk +from sentry_sdk.consts import SOURCE_FOR_STYLE from sentry_sdk.integrations import Integration, DidNotEnable from sentry_sdk.integrations._wsgi_common import RequestExtractor from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware from sentry_sdk.scope import should_send_default_pii -from sentry_sdk.tracing import SOURCE_FOR_STYLE from sentry_sdk.utils import ( capture_internal_exceptions, ensure_integration_enabled, diff --git a/sentry_sdk/integrations/quart.py b/sentry_sdk/integrations/quart.py index f7f8ce7902..68c1342216 100644 --- a/sentry_sdk/integrations/quart.py +++ b/sentry_sdk/integrations/quart.py @@ -3,11 +3,11 @@ from functools import wraps import sentry_sdk +from sentry_sdk.consts import SOURCE_FOR_STYLE from sentry_sdk.integrations import DidNotEnable, Integration from sentry_sdk.integrations._wsgi_common import _filter_headers from sentry_sdk.integrations.asgi import SentryAsgiMiddleware from sentry_sdk.scope import should_send_default_pii -from sentry_sdk.tracing import SOURCE_FOR_STYLE from sentry_sdk.utils import ( capture_internal_exceptions, ensure_integration_enabled, diff --git a/sentry_sdk/integrations/starlette.py b/sentry_sdk/integrations/starlette.py index bdb842f596..e6016a3624 100644 --- a/sentry_sdk/integrations/starlette.py +++ b/sentry_sdk/integrations/starlette.py @@ -5,7 +5,7 @@ from json import JSONDecodeError import sentry_sdk -from sentry_sdk.consts import OP +from sentry_sdk.consts import OP, SOURCE_FOR_STYLE, TransactionSource from sentry_sdk.integrations import ( DidNotEnable, Integration, @@ -18,10 +18,6 @@ ) from sentry_sdk.integrations.asgi import SentryAsgiMiddleware from sentry_sdk.scope import should_send_default_pii -from sentry_sdk.tracing import ( - SOURCE_FOR_STYLE, - TransactionSource, -) from sentry_sdk.utils import ( AnnotatedValue, capture_internal_exceptions, diff --git a/sentry_sdk/integrations/starlite.py b/sentry_sdk/integrations/starlite.py index dae105447b..928c697373 100644 --- a/sentry_sdk/integrations/starlite.py +++ b/sentry_sdk/integrations/starlite.py @@ -1,9 +1,8 @@ import sentry_sdk -from sentry_sdk.consts import OP +from sentry_sdk.consts import OP, SOURCE_FOR_STYLE, TransactionSource from sentry_sdk.integrations import DidNotEnable, Integration from sentry_sdk.integrations.asgi import SentryAsgiMiddleware from sentry_sdk.scope import should_send_default_pii -from sentry_sdk.tracing import SOURCE_FOR_STYLE, TransactionSource from sentry_sdk.utils import ( ensure_integration_enabled, event_from_exception, diff --git a/sentry_sdk/integrations/opentelemetry/consts.py b/sentry_sdk/opentelemetry/consts.py similarity index 91% rename from sentry_sdk/integrations/opentelemetry/consts.py rename to sentry_sdk/opentelemetry/consts.py index d4b2b47768..0e3cb54948 100644 --- a/sentry_sdk/integrations/opentelemetry/consts.py +++ b/sentry_sdk/opentelemetry/consts.py @@ -23,8 +23,6 @@ class SentrySpanAttribute: - # XXX not all of these need separate attributes, we might just use - # existing otel attrs for some DESCRIPTION = "sentry.description" OP = "sentry.op" ORIGIN = "sentry.origin" diff --git a/sentry_sdk/opentelemetry/contextvars_context.py b/sentry_sdk/opentelemetry/contextvars_context.py index 7e61a45bc5..66d59b4ae1 100644 --- 
a/sentry_sdk/opentelemetry/contextvars_context.py +++ b/sentry_sdk/opentelemetry/contextvars_context.py @@ -5,7 +5,7 @@ from opentelemetry.context.contextvars_context import ContextVarsRuntimeContext import sentry_sdk -from sentry_sdk.integrations.opentelemetry.consts import ( +from sentry_sdk.opentelemetry.consts import ( SENTRY_SCOPES_KEY, SENTRY_FORK_ISOLATION_SCOPE_KEY, SENTRY_USE_CURRENT_SCOPE_KEY, diff --git a/sentry_sdk/opentelemetry/propagator.py b/sentry_sdk/opentelemetry/propagator.py index fcc2009849..2a9ad70434 100644 --- a/sentry_sdk/opentelemetry/propagator.py +++ b/sentry_sdk/opentelemetry/propagator.py @@ -21,15 +21,15 @@ TraceFlags, ) -from sentry_sdk.integrations.opentelemetry.consts import ( +from sentry_sdk.consts import ( + BAGGAGE_HEADER_NAME, + SENTRY_TRACE_HEADER_NAME, +) +from sentry_sdk.opentelemetry.consts import ( SENTRY_BAGGAGE_KEY, SENTRY_TRACE_KEY, SENTRY_SCOPES_KEY, ) -from sentry_sdk.tracing import ( - BAGGAGE_HEADER_NAME, - SENTRY_TRACE_HEADER_NAME, -) from sentry_sdk.tracing_utils import Baggage, extract_sentrytrace_data from typing import TYPE_CHECKING diff --git a/sentry_sdk/opentelemetry/sampler.py b/sentry_sdk/opentelemetry/sampler.py index 83d647f1d8..2afe26d95e 100644 --- a/sentry_sdk/opentelemetry/sampler.py +++ b/sentry_sdk/opentelemetry/sampler.py @@ -6,17 +6,17 @@ from opentelemetry.trace.span import TraceState import sentry_sdk -from sentry_sdk.tracing_utils import ( - _generate_sample_rand, - has_tracing_enabled, -) -from sentry_sdk.utils import is_valid_sample_rate, logger -from sentry_sdk.integrations.opentelemetry.consts import ( +from sentry_sdk.opentelemetry.consts import ( TRACESTATE_SAMPLED_KEY, TRACESTATE_SAMPLE_RAND_KEY, TRACESTATE_SAMPLE_RATE_KEY, SentrySpanAttribute, ) +from sentry_sdk.tracing_utils import ( + _generate_sample_rand, + has_tracing_enabled, +) +from sentry_sdk.utils import is_valid_sample_rate, logger from typing import TYPE_CHECKING diff --git a/sentry_sdk/opentelemetry/span_processor.py b/sentry_sdk/opentelemetry/span_processor.py index ec936125c3..6da616ed87 100644 --- a/sentry_sdk/opentelemetry/span_processor.py +++ b/sentry_sdk/opentelemetry/span_processor.py @@ -12,17 +12,14 @@ from opentelemetry.sdk.trace import Span, ReadableSpan, SpanProcessor import sentry_sdk -from sentry_sdk.consts import SPANDATA -from sentry_sdk.tracing import DEFAULT_SPAN_ORIGIN +from sentry_sdk.consts import SPANDATA, DEFAULT_SPAN_ORIGIN from sentry_sdk.utils import get_current_thread_meta -from sentry_sdk.opentelemetry.sampler import create_sampling_context -from sentry_sdk.profiler.continuous_profiler import ( - try_autostart_continuous_profiler, - get_profiler_id, - try_profile_lifecycle_trace_start, +from sentry_sdk.opentelemetry.consts import ( + OTEL_SENTRY_CONTEXT, + SentrySpanAttribute, ) -from sentry_sdk.profiler.transaction_profiler import Profile -from sentry_sdk.integrations.opentelemetry.utils import ( +from sentry_sdk.opentelemetry.sampler import create_sampling_context +from sentry_sdk.opentelemetry.utils import ( is_sentry_span, convert_from_otel_timestamp, extract_span_attributes, @@ -33,10 +30,12 @@ get_sentry_meta, set_sentry_meta, ) -from sentry_sdk.integrations.opentelemetry.consts import ( - OTEL_SENTRY_CONTEXT, - SentrySpanAttribute, +from sentry_sdk.profiler.continuous_profiler import ( + try_autostart_continuous_profiler, + get_profiler_id, + try_profile_lifecycle_trace_start, ) +from sentry_sdk.profiler.transaction_profiler import Profile from sentry_sdk._types import TYPE_CHECKING if TYPE_CHECKING: diff --git 
a/sentry_sdk/integrations/opentelemetry/utils.py b/sentry_sdk/opentelemetry/utils.py similarity index 98% rename from sentry_sdk/integrations/opentelemetry/utils.py rename to sentry_sdk/opentelemetry/utils.py index 6e711c34a8..ade9858855 100644 --- a/sentry_sdk/integrations/opentelemetry/utils.py +++ b/sentry_sdk/opentelemetry/utils.py @@ -17,14 +17,15 @@ import sentry_sdk from sentry_sdk.utils import Dsn -from sentry_sdk.consts import SPANSTATUS, OP, SPANDATA -from sentry_sdk.tracing import ( - get_span_status_from_http_code, +from sentry_sdk.consts import ( + SPANSTATUS, + OP, + SPANDATA, DEFAULT_SPAN_ORIGIN, LOW_QUALITY_TRANSACTION_SOURCES, ) -from sentry_sdk.tracing_utils import Baggage -from sentry_sdk.integrations.opentelemetry.consts import SentrySpanAttribute +from sentry_sdk.opentelemetry.consts import SentrySpanAttribute +from sentry_sdk.tracing_utils import Baggage, get_span_status_from_http_code from sentry_sdk._types import TYPE_CHECKING diff --git a/sentry_sdk/scope.py b/sentry_sdk/scope.py index 487b45b583..1f3162605f 100644 --- a/sentry_sdk/scope.py +++ b/sentry_sdk/scope.py @@ -11,7 +11,12 @@ from sentry_sdk._types import AnnotatedValue from sentry_sdk.attachments import Attachment -from sentry_sdk.consts import DEFAULT_MAX_BREADCRUMBS, FALSE_VALUES +from sentry_sdk.consts import ( + DEFAULT_MAX_BREADCRUMBS, + FALSE_VALUES, + BAGGAGE_HEADER_NAME, + SENTRY_TRACE_HEADER_NAME, +) from sentry_sdk.feature_flags import FlagBuffer, DEFAULT_FLAG_CAPACITY from sentry_sdk.profiler.transaction_profiler import Profile from sentry_sdk.session import Session @@ -21,8 +26,6 @@ PropagationContext, ) from sentry_sdk.tracing import ( - BAGGAGE_HEADER_NAME, - SENTRY_TRACE_HEADER_NAME, NoOpSpan, Span, ) diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 37f56453bf..c56a7e729c 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -1,5 +1,4 @@ from datetime import datetime -from enum import Enum import json import warnings @@ -16,7 +15,29 @@ from opentelemetry.sdk.trace import ReadableSpan import sentry_sdk -from sentry_sdk.consts import SPANSTATUS, SPANDATA +from sentry_sdk.consts import ( + DEFAULT_SPAN_NAME, + DEFAULT_SPAN_ORIGIN, + BAGGAGE_HEADER_NAME, + SENTRY_TRACE_HEADER_NAME, + SPANSTATUS, + SPANDATA, + TransactionSource, +) +from sentry_sdk.opentelemetry.consts import ( + TRACESTATE_SAMPLE_RATE_KEY, + SentrySpanAttribute, +) +from sentry_sdk.opentelemetry.utils import ( + baggage_from_trace_state, + convert_from_otel_timestamp, + convert_to_otel_timestamp, + get_trace_context, + get_trace_state, + get_sentry_meta, + serialize_trace_state, +) +from sentry_sdk.tracing_utils import get_span_status_from_http_code from sentry_sdk.utils import ( _serialize_span_attribute, get_current_thread_meta, @@ -48,87 +69,10 @@ from sentry_sdk.tracing_utils import Baggage -BAGGAGE_HEADER_NAME = "baggage" -SENTRY_TRACE_HEADER_NAME = "sentry-trace" - - -# Transaction source -# see https://develop.sentry.dev/sdk/event-payloads/transaction/#transaction-annotations -class TransactionSource(str, Enum): - COMPONENT = "component" - CUSTOM = "custom" - ROUTE = "route" - TASK = "task" - URL = "url" - VIEW = "view" - - def __str__(self): - # type: () -> str - return self.value - - -# These are typically high cardinality and the server hates them -LOW_QUALITY_TRANSACTION_SOURCES = [ - TransactionSource.URL, -] - -SOURCE_FOR_STYLE = { - "endpoint": TransactionSource.COMPONENT, - "function_name": TransactionSource.COMPONENT, - "handler_name": TransactionSource.COMPONENT, - 
"method_and_path_pattern": TransactionSource.ROUTE, - "path": TransactionSource.URL, - "route_name": TransactionSource.COMPONENT, - "route_pattern": TransactionSource.ROUTE, - "uri_template": TransactionSource.ROUTE, - "url": TransactionSource.ROUTE, -} - -DEFAULT_SPAN_ORIGIN = "manual" -DEFAULT_SPAN_NAME = "" tracer = otel_trace.get_tracer(__name__) -def get_span_status_from_http_code(http_status_code): - # type: (int) -> str - """ - Returns the Sentry status corresponding to the given HTTP status code. - - See: https://develop.sentry.dev/sdk/event-payloads/contexts/#trace-context - """ - if http_status_code < 400: - return SPANSTATUS.OK - - elif 400 <= http_status_code < 500: - if http_status_code == 403: - return SPANSTATUS.PERMISSION_DENIED - elif http_status_code == 404: - return SPANSTATUS.NOT_FOUND - elif http_status_code == 429: - return SPANSTATUS.RESOURCE_EXHAUSTED - elif http_status_code == 413: - return SPANSTATUS.FAILED_PRECONDITION - elif http_status_code == 401: - return SPANSTATUS.UNAUTHENTICATED - elif http_status_code == 409: - return SPANSTATUS.ALREADY_EXISTS - else: - return SPANSTATUS.INVALID_ARGUMENT - - elif 500 <= http_status_code < 600: - if http_status_code == 504: - return SPANSTATUS.DEADLINE_EXCEEDED - elif http_status_code == 501: - return SPANSTATUS.UNIMPLEMENTED - elif http_status_code == 503: - return SPANSTATUS.UNAVAILABLE - else: - return SPANSTATUS.INTERNAL_ERROR - - return SPANSTATUS.UNKNOWN_ERROR - - class NoOpSpan: def __init__(self, **kwargs): # type: (Any) -> None @@ -260,12 +204,6 @@ def __init__( if skip_span: self._otel_span = INVALID_SPAN else: - from sentry_sdk.integrations.opentelemetry.consts import ( - SentrySpanAttribute, - ) - from sentry_sdk.integrations.opentelemetry.utils import ( - convert_to_otel_timestamp, - ) if start_timestamp is not None: # OTel timestamps have nanosecond precision @@ -360,38 +298,26 @@ def __exit__(self, ty, value, tb): @property def description(self): # type: () -> Optional[str] - from sentry_sdk.integrations.opentelemetry.consts import SentrySpanAttribute - return self.get_attribute(SentrySpanAttribute.DESCRIPTION) @description.setter def description(self, value): # type: (Optional[str]) -> None - from sentry_sdk.integrations.opentelemetry.consts import SentrySpanAttribute - self.set_attribute(SentrySpanAttribute.DESCRIPTION, value) @property def origin(self): # type: () -> Optional[str] - from sentry_sdk.integrations.opentelemetry.consts import SentrySpanAttribute - return self.get_attribute(SentrySpanAttribute.ORIGIN) @origin.setter def origin(self, value): # type: (Optional[str]) -> None - from sentry_sdk.integrations.opentelemetry.consts import SentrySpanAttribute - self.set_attribute(SentrySpanAttribute.ORIGIN, value) @property def root_span(self): # type: () -> Optional[Span] - from sentry_sdk.integrations.opentelemetry.utils import ( - get_sentry_meta, - ) - root_otel_span = cast( "Optional[OtelSpan]", get_sentry_meta(self._otel_span, "root_span") ) @@ -437,10 +363,6 @@ def sampled(self): @property def sample_rate(self): # type: () -> Optional[float] - from sentry_sdk.integrations.opentelemetry.consts import ( - TRACESTATE_SAMPLE_RATE_KEY, - ) - sample_rate = self._otel_span.get_span_context().trace_state.get( TRACESTATE_SAMPLE_RATE_KEY ) @@ -449,36 +371,26 @@ def sample_rate(self): @property def op(self): # type: () -> Optional[str] - from sentry_sdk.integrations.opentelemetry.consts import SentrySpanAttribute - return self.get_attribute(SentrySpanAttribute.OP) @op.setter def op(self, value): # type: 
(Optional[str]) -> None - from sentry_sdk.integrations.opentelemetry.consts import SentrySpanAttribute - self.set_attribute(SentrySpanAttribute.OP, value) @property def name(self): # type: () -> Optional[str] - from sentry_sdk.integrations.opentelemetry.consts import SentrySpanAttribute - return self.get_attribute(SentrySpanAttribute.NAME) @name.setter def name(self, value): # type: (Optional[str]) -> None - from sentry_sdk.integrations.opentelemetry.consts import SentrySpanAttribute - self.set_attribute(SentrySpanAttribute.NAME, value) @property def source(self): # type: () -> str - from sentry_sdk.integrations.opentelemetry.consts import SentrySpanAttribute - return ( self.get_attribute(SentrySpanAttribute.SOURCE) or TransactionSource.CUSTOM ) @@ -486,8 +398,6 @@ def source(self): @source.setter def source(self, value): # type: (str) -> None - from sentry_sdk.integrations.opentelemetry.consts import SentrySpanAttribute - self.set_attribute(SentrySpanAttribute.SOURCE, value) @property @@ -500,10 +410,6 @@ def start_timestamp(self): if start_time is None: return None - from sentry_sdk.integrations.opentelemetry.utils import ( - convert_from_otel_timestamp, - ) - return convert_from_otel_timestamp(start_time) @property @@ -516,10 +422,6 @@ def timestamp(self): if end_time is None: return None - from sentry_sdk.integrations.opentelemetry.utils import ( - convert_from_otel_timestamp, - ) - return convert_from_otel_timestamp(end_time) def start_child(self, **kwargs): @@ -529,11 +431,6 @@ def start_child(self, **kwargs): def iter_headers(self): # type: () -> Iterator[Tuple[str, str]] yield SENTRY_TRACE_HEADER_NAME, self.to_traceparent() - - from sentry_sdk.integrations.opentelemetry.utils import ( - serialize_trace_state, - ) - yield BAGGAGE_HEADER_NAME, serialize_trace_state(self.trace_state) def to_traceparent(self): @@ -554,10 +451,6 @@ def to_traceparent(self): @property def trace_state(self): # type: () -> TraceState - from sentry_sdk.integrations.opentelemetry.utils import ( - get_trace_state, - ) - return get_trace_state(self._otel_span) def to_baggage(self): @@ -566,16 +459,10 @@ def to_baggage(self): def get_baggage(self): # type: () -> Baggage - from sentry_sdk.integrations.opentelemetry.utils import ( - baggage_from_trace_state, - ) - return baggage_from_trace_state(self.trace_state) def set_tag(self, key, value): # type: (str, Any) -> None - from sentry_sdk.integrations.opentelemetry.consts import SentrySpanAttribute - self.set_attribute(f"{SentrySpanAttribute.TAG}.{key}", value) def set_data(self, key, value): @@ -642,8 +529,6 @@ def set_status(self, status): def set_measurement(self, name, value, unit=""): # type: (str, float, MeasurementUnit) -> None - from sentry_sdk.integrations.opentelemetry.consts import SentrySpanAttribute - # Stringify value here since OTel expects all seq items to be of one type self.set_attribute( f"{SentrySpanAttribute.MEASUREMENT}.{name}", (str(value), unit) @@ -674,10 +559,6 @@ def is_success(self): def finish(self, end_timestamp=None): # type: (Optional[Union[float, datetime]]) -> None if end_timestamp is not None: - from sentry_sdk.integrations.opentelemetry.utils import ( - convert_to_otel_timestamp, - ) - self._otel_span.end(convert_to_otel_timestamp(end_timestamp)) else: self._otel_span.end() @@ -696,16 +577,10 @@ def get_trace_context(self): if not isinstance(self._otel_span, ReadableSpan): return {} - from sentry_sdk.integrations.opentelemetry.utils import ( - get_trace_context, - ) - return get_trace_context(self._otel_span) def 
set_context(self, key, value): # type: (str, Any) -> None - from sentry_sdk.integrations.opentelemetry.consts import SentrySpanAttribute - # TODO-neel-potel we cannot add dicts here self.set_attribute(f"{SentrySpanAttribute.CONTEXT}.{key}", value) diff --git a/sentry_sdk/tracing_utils.py b/sentry_sdk/tracing_utils.py index 27320ac589..c1d60cf269 100644 --- a/sentry_sdk/tracing_utils.py +++ b/sentry_sdk/tracing_utils.py @@ -13,7 +13,13 @@ from urllib.parse import quote, unquote import sentry_sdk -from sentry_sdk.consts import OP, SPANDATA +from sentry_sdk.consts import ( + OP, + SPANDATA, + SPANSTATUS, + BAGGAGE_HEADER_NAME, + SENTRY_TRACE_HEADER_NAME, +) from sentry_sdk.utils import ( capture_internal_exceptions, filename_for_module, @@ -803,8 +809,40 @@ def _sample_rand_range(parent_sampled, sample_rate): return sample_rate, 1.0 -# Circular imports -from sentry_sdk.tracing import ( - BAGGAGE_HEADER_NAME, - SENTRY_TRACE_HEADER_NAME, -) +def get_span_status_from_http_code(http_status_code): + # type: (int) -> str + """ + Returns the Sentry status corresponding to the given HTTP status code. + + See: https://develop.sentry.dev/sdk/event-payloads/contexts/#trace-context + """ + if http_status_code < 400: + return SPANSTATUS.OK + + elif 400 <= http_status_code < 500: + if http_status_code == 403: + return SPANSTATUS.PERMISSION_DENIED + elif http_status_code == 404: + return SPANSTATUS.NOT_FOUND + elif http_status_code == 429: + return SPANSTATUS.RESOURCE_EXHAUSTED + elif http_status_code == 413: + return SPANSTATUS.FAILED_PRECONDITION + elif http_status_code == 401: + return SPANSTATUS.UNAUTHENTICATED + elif http_status_code == 409: + return SPANSTATUS.ALREADY_EXISTS + else: + return SPANSTATUS.INVALID_ARGUMENT + + elif 500 <= http_status_code < 600: + if http_status_code == 504: + return SPANSTATUS.DEADLINE_EXCEEDED + elif http_status_code == 501: + return SPANSTATUS.UNIMPLEMENTED + elif http_status_code == 503: + return SPANSTATUS.UNAVAILABLE + else: + return SPANSTATUS.INTERNAL_ERROR + + return SPANSTATUS.UNKNOWN_ERROR diff --git a/tests/integrations/opentelemetry/test_propagator.py b/tests/integrations/opentelemetry/test_propagator.py index f57679cc3e..49437fa896 100644 --- a/tests/integrations/opentelemetry/test_propagator.py +++ b/tests/integrations/opentelemetry/test_propagator.py @@ -6,7 +6,7 @@ from opentelemetry.propagators.textmap import DefaultSetter import sentry_sdk -from sentry_sdk.integrations.opentelemetry.consts import ( +from sentry_sdk.opentelemetry.consts import ( SENTRY_BAGGAGE_KEY, SENTRY_TRACE_KEY, ) diff --git a/tests/integrations/opentelemetry/test_utils.py b/tests/integrations/opentelemetry/test_utils.py index fde66bf590..b7bc055d3c 100644 --- a/tests/integrations/opentelemetry/test_utils.py +++ b/tests/integrations/opentelemetry/test_utils.py @@ -3,7 +3,7 @@ import pytest from opentelemetry.trace import SpanKind, Status, StatusCode -from sentry_sdk.integrations.opentelemetry.utils import ( +from sentry_sdk.opentelemetry.utils import ( extract_span_data, extract_span_status, span_data_for_db_query, From 2d518392198efa3e090ccae233775e1377633631 Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Mon, 14 Apr 2025 12:20:43 +0200 Subject: [PATCH 226/244] ref: Move otel scope out of `integrations/opentelemetry/` (#4276) Moving stuff out of `integrations/opentelemetry/` step by step since there is no OpenTelemetry integration anymore -- it's part of the core SDK. 
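The downstream change is mechanical — only the package prefix moves up one level. A hedged sketch, assuming code imported the scope helpers directly (the moved module is spelled out just below):

```python
# Old import path, kept as a comment for comparison:
#   from sentry_sdk.integrations.opentelemetry.scope import setup_scope_context_management

# New import path after this patch:
from sentry_sdk.opentelemetry.scope import setup_scope_context_management

# Called the same way as before, with no arguments:
setup_scope_context_management()
```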
This moves `sentry_sdk/integrations/opentelemetry/scope.py` -> `sentry_sdk/opentelemetry/scope.py` Ref https://github.com/getsentry/sentry-python/issues/3853 --- sentry_sdk/_init_implementation.py | 2 +- sentry_sdk/api.py | 2 +- sentry_sdk/opentelemetry/contextvars_context.py | 2 +- sentry_sdk/opentelemetry/propagator.py | 2 +- sentry_sdk/{integrations => }/opentelemetry/scope.py | 0 setup.py | 4 +++- tests/conftest.py | 2 +- tests/test_scope.py | 2 +- 8 files changed, 9 insertions(+), 7 deletions(-) rename sentry_sdk/{integrations => }/opentelemetry/scope.py (100%) diff --git a/sentry_sdk/_init_implementation.py b/sentry_sdk/_init_implementation.py index 51bcc9029c..34e9d071e9 100644 --- a/sentry_sdk/_init_implementation.py +++ b/sentry_sdk/_init_implementation.py @@ -2,7 +2,7 @@ import sentry_sdk from sentry_sdk.consts import ClientConstructor -from sentry_sdk.integrations.opentelemetry.scope import setup_scope_context_management +from sentry_sdk.opentelemetry.scope import setup_scope_context_management if TYPE_CHECKING: from typing import Any, Optional diff --git a/sentry_sdk/api.py b/sentry_sdk/api.py index 0555244a7c..2ded31ee48 100644 --- a/sentry_sdk/api.py +++ b/sentry_sdk/api.py @@ -8,7 +8,7 @@ # TODO-neel-potel make 2 scope strategies/impls and switch from sentry_sdk.scope import Scope as BaseScope -from sentry_sdk.integrations.opentelemetry.scope import ( +from sentry_sdk.opentelemetry.scope import ( PotelScope as Scope, new_scope, isolation_scope, diff --git a/sentry_sdk/opentelemetry/contextvars_context.py b/sentry_sdk/opentelemetry/contextvars_context.py index 66d59b4ae1..51d450af82 100644 --- a/sentry_sdk/opentelemetry/contextvars_context.py +++ b/sentry_sdk/opentelemetry/contextvars_context.py @@ -15,7 +15,7 @@ if TYPE_CHECKING: from typing import Optional from contextvars import Token - import sentry_sdk.integrations.opentelemetry.scope as scope + import sentry_sdk.opentelemetry.scope as scope class SentryContextVarsRuntimeContext(ContextVarsRuntimeContext): diff --git a/sentry_sdk/opentelemetry/propagator.py b/sentry_sdk/opentelemetry/propagator.py index 2a9ad70434..6062e5643a 100644 --- a/sentry_sdk/opentelemetry/propagator.py +++ b/sentry_sdk/opentelemetry/propagator.py @@ -36,7 +36,7 @@ if TYPE_CHECKING: from typing import Optional, Set - import sentry_sdk.integrations.opentelemetry.scope as scope + import sentry_sdk.opentelemetry.scope as scope class SentryPropagator(TextMapPropagator): diff --git a/sentry_sdk/integrations/opentelemetry/scope.py b/sentry_sdk/opentelemetry/scope.py similarity index 100% rename from sentry_sdk/integrations/opentelemetry/scope.py rename to sentry_sdk/opentelemetry/scope.py diff --git a/setup.py b/setup.py index ecbb3e2f54..d55915e42a 100644 --- a/setup.py +++ b/setup.py @@ -85,7 +85,9 @@ def get_file_text(file_name): "unleash": ["UnleashClient>=6.0.1"], }, entry_points={ - "opentelemetry_propagator": ["sentry=sentry_sdk.opentelemetry:SentryPropagator"] + "opentelemetry_propagator": [ + "sentry=sentry_sdk.opentelemetry:SentryPropagator" + ], }, classifiers=[ "Development Status :: 5 - Production/Stable", diff --git a/tests/conftest.py b/tests/conftest.py index 3fdbab5e54..5987265e32 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -63,7 +63,7 @@ def benchmark(): from sentry_sdk import scope -from sentry_sdk.integrations.opentelemetry.scope import ( +from sentry_sdk.opentelemetry.scope import ( setup_scope_context_management, setup_initial_scopes, ) diff --git a/tests/test_scope.py b/tests/test_scope.py index 
c5a2d9380e..507e76046c 100644 --- a/tests/test_scope.py +++ b/tests/test_scope.py @@ -15,7 +15,7 @@ ScopeType, should_send_default_pii, ) -from sentry_sdk.integrations.opentelemetry.scope import ( +from sentry_sdk.opentelemetry.scope import ( PotelScope as Scope, use_scope, use_isolation_scope, From a61006625acdb775999e54b51705051a0b4f3b8b Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Tue, 15 Apr 2025 12:17:00 +0200 Subject: [PATCH 227/244] ref: Move OTel setup out of `integrations/opentelemetry/` (#4277) Moving stuff out of `integrations/opentelemetry/` step by step since there is no OpenTelemetry integration anymore -- it's part of the core SDK. - Moved `sentry_sdk/integrations/opentelemetry/integration.py` -> `sentry_sdk/opentelemetry/tracing.py`. - Removed all the experimental autoinstrumentation stuff from `integration.py`. - Removed `integrations/opentelemetry/` altogether (there was nothing left but `__init__.py`, which is now also gone). - Moved all tests from `tests/integrations/opentelemetry` to `tests/opentelemetry`. - Removed the potel/opentelemetry integration test targets from `tox.ini`. These will now be run as part of the Common test suite. Ref https://github.com/getsentry/sentry-python/issues/3853 --- .github/workflows/test-integrations-misc.yml | 2 +- scripts/populate_tox/tox.jinja | 14 ---- sentry_sdk/client.py | 8 ++ sentry_sdk/integrations/__init__.py | 1 - .../integrations/opentelemetry/__init__.py | 0 .../integrations/opentelemetry/integration.py | 80 ------------------- sentry_sdk/opentelemetry/tracing.py | 35 ++++++++ .../opentelemetry/__init__.py | 0 .../opentelemetry/test_compat.py | 0 .../opentelemetry/test_entry_points.py | 0 .../opentelemetry/test_potel.py | 0 .../opentelemetry/test_propagator.py | 0 .../opentelemetry/test_sampler.py | 0 .../opentelemetry/test_utils.py | 0 tox.ini | 16 +--- 15 files changed, 45 insertions(+), 111 deletions(-) delete mode 100644 sentry_sdk/integrations/opentelemetry/__init__.py delete mode 100644 sentry_sdk/integrations/opentelemetry/integration.py create mode 100644 sentry_sdk/opentelemetry/tracing.py rename tests/{integrations => }/opentelemetry/__init__.py (100%) rename tests/{integrations => }/opentelemetry/test_compat.py (100%) rename tests/{integrations => }/opentelemetry/test_entry_points.py (100%) rename tests/{integrations => }/opentelemetry/test_potel.py (100%) rename tests/{integrations => }/opentelemetry/test_propagator.py (100%) rename tests/{integrations => }/opentelemetry/test_sampler.py (100%) rename tests/{integrations => }/opentelemetry/test_utils.py (100%) diff --git a/.github/workflows/test-integrations-misc.yml b/.github/workflows/test-integrations-misc.yml index b9c347933c..1f670f1ddd 100644 --- a/.github/workflows/test-integrations-misc.yml +++ b/.github/workflows/test-integrations-misc.yml @@ -29,7 +29,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.7","3.8","3.9","3.10","3.11","3.12","3.13"] + python-version: ["3.7","3.8","3.10","3.11","3.12","3.13"] os: [ubuntu-22.04] steps: - uses: actions/checkout@v4.2.2 diff --git a/scripts/populate_tox/tox.jinja b/scripts/populate_tox/tox.jinja index 8088eb1998..b76dfd12fa 100644 --- a/scripts/populate_tox/tox.jinja +++ b/scripts/populate_tox/tox.jinja @@ -107,12 +107,6 @@ envlist = {py3.9,py3.11,py3.12}-openai-latest {py3.9,py3.11,py3.12}-openai-notiktoken - # OpenTelemetry (OTel) - {py3.7,py3.9,py3.12,py3.13}-opentelemetry - - # OpenTelemetry Experimental (POTel) - {py3.8,py3.9,py3.10,py3.11,py3.12,py3.13}-potel - # pure_eval 
{py3.7,py3.12,py3.13}-pure_eval @@ -310,12 +304,6 @@ deps = openai-latest: tiktoken~=0.6.0 openai-notiktoken: openai - # OpenTelemetry (OTel) - opentelemetry: opentelemetry-distro - - # OpenTelemetry Experimental (POTel) - potel: -e .[opentelemetry-experimental] - # pure_eval pure_eval: pure_eval @@ -441,8 +429,6 @@ setenv = loguru: TESTPATH=tests/integrations/loguru openai: TESTPATH=tests/integrations/openai openfeature: TESTPATH=tests/integrations/openfeature - opentelemetry: TESTPATH=tests/integrations/opentelemetry - potel: TESTPATH=tests/integrations/opentelemetry pure_eval: TESTPATH=tests/integrations/pure_eval pymongo: TESTPATH=tests/integrations/pymongo pyramid: TESTPATH=tests/integrations/pyramid diff --git a/sentry_sdk/client.py b/sentry_sdk/client.py index c4599cd840..2f4349253b 100644 --- a/sentry_sdk/client.py +++ b/sentry_sdk/client.py @@ -35,6 +35,7 @@ from sentry_sdk.integrations.dedupe import DedupeIntegration from sentry_sdk.sessions import SessionFlusher from sentry_sdk.envelope import Envelope + from sentry_sdk.profiler.continuous_profiler import setup_continuous_profiler from sentry_sdk.profiler.transaction_profiler import ( has_profiling_enabled, @@ -392,6 +393,13 @@ def _capture_envelope(envelope): except Exception as e: logger.debug("Can not set up continuous profiler. (%s)", e) + from sentry_sdk.opentelemetry.tracing import ( + patch_readable_span, + setup_sentry_tracing, + ) + + patch_readable_span() + setup_sentry_tracing() finally: _client_init_debug.set(old_debug) diff --git a/sentry_sdk/integrations/__init__.py b/sentry_sdk/integrations/__init__.py index fcfcccebd0..c7967852a7 100644 --- a/sentry_sdk/integrations/__init__.py +++ b/sentry_sdk/integrations/__init__.py @@ -69,7 +69,6 @@ def iter_default_integrations(with_auto_enabling_integrations): "sentry_sdk.integrations.excepthook.ExcepthookIntegration", "sentry_sdk.integrations.logging.LoggingIntegration", "sentry_sdk.integrations.modules.ModulesIntegration", - "sentry_sdk.integrations.opentelemetry.integration.OpenTelemetryIntegration", "sentry_sdk.integrations.stdlib.StdlibIntegration", "sentry_sdk.integrations.threading.ThreadingIntegration", ] diff --git a/sentry_sdk/integrations/opentelemetry/__init__.py b/sentry_sdk/integrations/opentelemetry/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/sentry_sdk/integrations/opentelemetry/integration.py b/sentry_sdk/integrations/opentelemetry/integration.py deleted file mode 100644 index ef5bd34f96..0000000000 --- a/sentry_sdk/integrations/opentelemetry/integration.py +++ /dev/null @@ -1,80 +0,0 @@ -""" -IMPORTANT: The contents of this file are part of a proof of concept and as such -are experimental and not suitable for production use. They may be changed or -removed at any time without prior notice. 
-""" - -from sentry_sdk.integrations import DidNotEnable, Integration -from sentry_sdk.opentelemetry import ( - SentryPropagator, - SentrySampler, - SentrySpanProcessor, -) -from sentry_sdk.utils import logger - -try: - from opentelemetry import trace - from opentelemetry.propagate import set_global_textmap - from opentelemetry.sdk.trace import TracerProvider, Span, ReadableSpan -except ImportError: - raise DidNotEnable("opentelemetry not installed") - -try: - from opentelemetry.instrumentation.django import DjangoInstrumentor # type: ignore[import-not-found] -except ImportError: - DjangoInstrumentor = None - - -CONFIGURABLE_INSTRUMENTATIONS = { - DjangoInstrumentor: {"is_sql_commentor_enabled": True}, -} - - -class OpenTelemetryIntegration(Integration): - identifier = "opentelemetry" - - @staticmethod - def setup_once(): - # type: () -> None - logger.warning( - "[OTel] Initializing highly experimental OpenTelemetry support. " - "Use at your own risk." - ) - - _setup_sentry_tracing() - _patch_readable_span() - # _setup_instrumentors() - - logger.debug("[OTel] Finished setting up OpenTelemetry integration") - - -def _patch_readable_span(): - # type: () -> None - """ - We need to pass through sentry specific metadata/objects from Span to ReadableSpan - to work with them consistently in the SpanProcessor. - """ - old_readable_span = Span._readable_span - - def sentry_patched_readable_span(self): - # type: (Span) -> ReadableSpan - readable_span = old_readable_span(self) - readable_span._sentry_meta = getattr(self, "_sentry_meta", {}) # type: ignore[attr-defined] - return readable_span - - Span._readable_span = sentry_patched_readable_span # type: ignore[method-assign] - - -def _setup_sentry_tracing(): - # type: () -> None - provider = TracerProvider(sampler=SentrySampler()) - provider.add_span_processor(SentrySpanProcessor()) - trace.set_tracer_provider(provider) - - set_global_textmap(SentryPropagator()) - - -def _setup_instrumentors(): - # type: () -> None - for instrumentor, kwargs in CONFIGURABLE_INSTRUMENTATIONS.items(): - instrumentor().instrument(**kwargs) diff --git a/sentry_sdk/opentelemetry/tracing.py b/sentry_sdk/opentelemetry/tracing.py new file mode 100644 index 0000000000..8392c1515a --- /dev/null +++ b/sentry_sdk/opentelemetry/tracing.py @@ -0,0 +1,35 @@ +from opentelemetry import trace +from opentelemetry.propagate import set_global_textmap +from opentelemetry.sdk.trace import TracerProvider, Span, ReadableSpan + +from sentry_sdk.opentelemetry import ( + SentryPropagator, + SentrySampler, + SentrySpanProcessor, +) + + +def patch_readable_span(): + # type: () -> None + """ + We need to pass through sentry specific metadata/objects from Span to ReadableSpan + to work with them consistently in the SpanProcessor. 
+ """ + old_readable_span = Span._readable_span + + def sentry_patched_readable_span(self): + # type: (Span) -> ReadableSpan + readable_span = old_readable_span(self) + readable_span._sentry_meta = getattr(self, "_sentry_meta", {}) # type: ignore[attr-defined] + return readable_span + + Span._readable_span = sentry_patched_readable_span # type: ignore[method-assign] + + +def setup_sentry_tracing(): + # type: () -> None + provider = TracerProvider(sampler=SentrySampler()) + provider.add_span_processor(SentrySpanProcessor()) + trace.set_tracer_provider(provider) + + set_global_textmap(SentryPropagator()) diff --git a/tests/integrations/opentelemetry/__init__.py b/tests/opentelemetry/__init__.py similarity index 100% rename from tests/integrations/opentelemetry/__init__.py rename to tests/opentelemetry/__init__.py diff --git a/tests/integrations/opentelemetry/test_compat.py b/tests/opentelemetry/test_compat.py similarity index 100% rename from tests/integrations/opentelemetry/test_compat.py rename to tests/opentelemetry/test_compat.py diff --git a/tests/integrations/opentelemetry/test_entry_points.py b/tests/opentelemetry/test_entry_points.py similarity index 100% rename from tests/integrations/opentelemetry/test_entry_points.py rename to tests/opentelemetry/test_entry_points.py diff --git a/tests/integrations/opentelemetry/test_potel.py b/tests/opentelemetry/test_potel.py similarity index 100% rename from tests/integrations/opentelemetry/test_potel.py rename to tests/opentelemetry/test_potel.py diff --git a/tests/integrations/opentelemetry/test_propagator.py b/tests/opentelemetry/test_propagator.py similarity index 100% rename from tests/integrations/opentelemetry/test_propagator.py rename to tests/opentelemetry/test_propagator.py diff --git a/tests/integrations/opentelemetry/test_sampler.py b/tests/opentelemetry/test_sampler.py similarity index 100% rename from tests/integrations/opentelemetry/test_sampler.py rename to tests/opentelemetry/test_sampler.py diff --git a/tests/integrations/opentelemetry/test_utils.py b/tests/opentelemetry/test_utils.py similarity index 100% rename from tests/integrations/opentelemetry/test_utils.py rename to tests/opentelemetry/test_utils.py diff --git a/tox.ini b/tox.ini index 3777f8f0ca..744b7c62a0 100644 --- a/tox.ini +++ b/tox.ini @@ -10,7 +10,7 @@ # The file (and all resulting CI YAMLs) then need to be regenerated via # "scripts/generate-test-files.sh". 
# -# Last generated: 2025-04-10T12:24:07.555804+00:00 +# Last generated: 2025-04-10T13:32:09.968256+00:00 [tox] requires = @@ -107,12 +107,6 @@ envlist = {py3.9,py3.11,py3.12}-openai-latest {py3.9,py3.11,py3.12}-openai-notiktoken - # OpenTelemetry (OTel) - {py3.7,py3.9,py3.12,py3.13}-opentelemetry - - # OpenTelemetry Experimental (POTel) - {py3.8,py3.9,py3.10,py3.11,py3.12,py3.13}-potel - # pure_eval {py3.7,py3.12,py3.13}-pure_eval @@ -435,12 +429,6 @@ deps = openai-latest: tiktoken~=0.6.0 openai-notiktoken: openai - # OpenTelemetry (OTel) - opentelemetry: opentelemetry-distro - - # OpenTelemetry Experimental (POTel) - potel: -e .[opentelemetry-experimental] - # pure_eval pure_eval: pure_eval @@ -757,8 +745,6 @@ setenv = loguru: TESTPATH=tests/integrations/loguru openai: TESTPATH=tests/integrations/openai openfeature: TESTPATH=tests/integrations/openfeature - opentelemetry: TESTPATH=tests/integrations/opentelemetry - potel: TESTPATH=tests/integrations/opentelemetry pure_eval: TESTPATH=tests/integrations/pure_eval pymongo: TESTPATH=tests/integrations/pymongo pyramid: TESTPATH=tests/integrations/pyramid From 1e69dabf8dd5211781f74358520c098ec18e67fa Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Tue, 15 Apr 2025 14:56:47 +0200 Subject: [PATCH 228/244] feat(logging): Do not capture errors from `LoggingIntegration` to Sentry by default (#4300) Fixes #4187 --- MIGRATION_GUIDE.md | 1 + sentry_sdk/integrations/logging.py | 2 +- tests/integrations/flask/test_flask.py | 9 +- tests/integrations/logging/test_logging.py | 95 ++++++++++++++----- .../integrations/starlette/test_starlette.py | 5 +- tests/test_logs.py | 5 +- tests/test_scrubber.py | 6 +- 7 files changed, 93 insertions(+), 30 deletions(-) diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index 87a81313eb..d05b617e4c 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -26,6 +26,7 @@ Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of wh - `sentry_sdk.init` now returns `None` instead of a context manager. - The `sampling_context` argument of `traces_sampler` and `profiles_sampler` now additionally contains all span attributes known at span start. - We updated how we handle `ExceptionGroup`s. You will now get more data if ExceptionGroups are appearing in chained exceptions. It could happen that after updating the SDK the grouping of issues change because of this. So eventually you will see the same exception in two Sentry issues (one from before the update, one from after the update) +- The integration for Python `logging` module does not send Sentry issues by default anymore when calling `logging.error()`, `logging.critical()` or `logging.exception()`. If you want to preserve the old behavior use `sentry_sdk.init(integrations=[LoggingIntegration(event_level="ERROR")])`. - The integration-specific content of the `sampling_context` argument of `traces_sampler` and `profiles_sampler` now looks different. - The Celery integration doesn't add the `celery_job` dictionary anymore. 
Instead, the individual keys are now available as: diff --git a/sentry_sdk/integrations/logging.py b/sentry_sdk/integrations/logging.py index 0647740d1f..6caa6dab34 100644 --- a/sentry_sdk/integrations/logging.py +++ b/sentry_sdk/integrations/logging.py @@ -23,7 +23,7 @@ from typing import Optional DEFAULT_LEVEL = logging.INFO -DEFAULT_EVENT_LEVEL = logging.ERROR +DEFAULT_EVENT_LEVEL = None # None means no events are captured LOGGING_TO_EVENT_LEVEL = { logging.NOTSET: "notset", logging.DEBUG: "debug", diff --git a/tests/integrations/flask/test_flask.py b/tests/integrations/flask/test_flask.py index 4e92df7e7c..a95393c585 100644 --- a/tests/integrations/flask/test_flask.py +++ b/tests/integrations/flask/test_flask.py @@ -285,7 +285,7 @@ def index(): try: raise ValueError("stuff") except Exception: - logging.exception("stuff happened") + sentry_sdk.capture_exception() 1 / 0 envelopes = capture_envelopes() @@ -875,7 +875,12 @@ def index(): def test_request_not_modified_by_reference(sentry_init, capture_events, app): - sentry_init(integrations=[flask_sentry.FlaskIntegration()]) + sentry_init( + integrations=[ + flask_sentry.FlaskIntegration(), + LoggingIntegration(event_level="ERROR"), + ] + ) @app.route("/", methods=["POST"]) def index(): diff --git a/tests/integrations/logging/test_logging.py b/tests/integrations/logging/test_logging.py index 8c325bc86c..13135f8e47 100644 --- a/tests/integrations/logging/test_logging.py +++ b/tests/integrations/logging/test_logging.py @@ -15,42 +15,71 @@ def reset_level(): logger.setLevel(logging.DEBUG) -@pytest.mark.parametrize("logger", [logger, other_logger]) -def test_logging_works_with_many_loggers(sentry_init, capture_events, logger): - sentry_init(integrations=[LoggingIntegration(event_level="ERROR")]) +@pytest.mark.parametrize("integrations", [None, [], [LoggingIntegration()]]) +@pytest.mark.parametrize( + "kwargs", [{"exc_info": None}, {}, {"exc_info": 0}, {"exc_info": False}] +) +def test_logging_defaults(integrations, sentry_init, capture_events, kwargs): + sentry_init(integrations=integrations) events = capture_events() logger.info("bread") - logger.critical("LOL") - (event,) = events - assert event["level"] == "fatal" - assert not event["logentry"]["params"] - assert event["logentry"]["message"] == "LOL" - assert any(crumb["message"] == "bread" for crumb in event["breadcrumbs"]["values"]) + logger.error("error") + logger.critical("LOL", **kwargs) + + assert len(events) == 0 -@pytest.mark.parametrize("integrations", [None, [], [LoggingIntegration()]]) @pytest.mark.parametrize( "kwargs", [{"exc_info": None}, {}, {"exc_info": 0}, {"exc_info": False}] ) -def test_logging_defaults(integrations, sentry_init, capture_events, kwargs): - sentry_init(integrations=integrations) +def test_logging_basic(sentry_init, capture_events, kwargs): + sentry_init(integrations=[LoggingIntegration(event_level=logging.ERROR)]) events = capture_events() logger.info("bread") + logger.error("error") logger.critical("LOL", **kwargs) - (event,) = events + (error_event, critical_event) = events - assert event["level"] == "fatal" - assert any(crumb["message"] == "bread" for crumb in event["breadcrumbs"]["values"]) + assert error_event["level"] == "error" + assert any( + crumb["message"] == "bread" for crumb in error_event["breadcrumbs"]["values"] + ) assert not any( - crumb["message"] == "LOL" for crumb in event["breadcrumbs"]["values"] + crumb["message"] == "LOL" for crumb in error_event["breadcrumbs"]["values"] ) - assert "threads" not in event + assert "threads" not in 
error_event + + assert critical_event["level"] == "fatal" + assert any( + crumb["message"] == "bread" for crumb in critical_event["breadcrumbs"]["values"] + ) + assert not any( + crumb["message"] == "LOL" for crumb in critical_event["breadcrumbs"]["values"] + ) + assert "threads" not in critical_event + + +@pytest.mark.parametrize("logger", [logger, other_logger]) +def test_logging_works_with_many_loggers(sentry_init, capture_events, logger): + sentry_init(integrations=[LoggingIntegration(event_level="ERROR")]) + events = capture_events() + + logger.info("bread") + logger.critical("LOL") + (event,) = events + assert event["level"] == "fatal" + assert not event["logentry"]["params"] + assert event["logentry"]["message"] == "LOL" + assert any(crumb["message"] == "bread" for crumb in event["breadcrumbs"]["values"]) def test_logging_extra_data(sentry_init, capture_events): - sentry_init(integrations=[LoggingIntegration()], default_integrations=False) + sentry_init( + integrations=[LoggingIntegration(event_level=logging.ERROR)], + default_integrations=False, + ) events = capture_events() logger.info("bread", extra=dict(foo=42)) @@ -67,7 +96,10 @@ def test_logging_extra_data(sentry_init, capture_events): def test_logging_extra_data_integer_keys(sentry_init, capture_events): - sentry_init(integrations=[LoggingIntegration()], default_integrations=False) + sentry_init( + integrations=[LoggingIntegration(event_level=logging.ERROR)], + default_integrations=False, + ) events = capture_events() logger.critical("integer in extra keys", extra={1: 1}) @@ -85,7 +117,10 @@ def test_logging_extra_data_integer_keys(sentry_init, capture_events): ), ) def test_logging_stack_trace(sentry_init, capture_events, enable_stack_trace_kwarg): - sentry_init(integrations=[LoggingIntegration()], default_integrations=False) + sentry_init( + integrations=[LoggingIntegration(event_level=logging.ERROR)], + default_integrations=False, + ) events = capture_events() logger.error("first", **enable_stack_trace_kwarg) @@ -104,7 +139,10 @@ def test_logging_stack_trace(sentry_init, capture_events, enable_stack_trace_kwa def test_logging_level(sentry_init, capture_events): - sentry_init(integrations=[LoggingIntegration()], default_integrations=False) + sentry_init( + integrations=[LoggingIntegration(event_level=logging.ERROR)], + default_integrations=False, + ) events = capture_events() logger.setLevel(logging.WARNING) @@ -158,7 +196,10 @@ def test_custom_log_level_names(sentry_init, capture_events): def test_logging_filters(sentry_init, capture_events): - sentry_init(integrations=[LoggingIntegration()], default_integrations=False) + sentry_init( + integrations=[LoggingIntegration(event_level=logging.ERROR)], + default_integrations=False, + ) events = capture_events() should_log = False @@ -210,7 +251,10 @@ def test_logging_captured_warnings(sentry_init, capture_events, recwarn): def test_ignore_logger(sentry_init, capture_events): - sentry_init(integrations=[LoggingIntegration()], default_integrations=False) + sentry_init( + integrations=[LoggingIntegration(event_level=logging.ERROR)], + default_integrations=False, + ) events = capture_events() ignore_logger("testfoo") @@ -221,7 +265,10 @@ def test_ignore_logger(sentry_init, capture_events): def test_ignore_logger_wildcard(sentry_init, capture_events): - sentry_init(integrations=[LoggingIntegration()], default_integrations=False) + sentry_init( + integrations=[LoggingIntegration(event_level=logging.ERROR)], + default_integrations=False, + ) events = capture_events() 
ignore_logger("testfoo.*") diff --git a/tests/integrations/starlette/test_starlette.py b/tests/integrations/starlette/test_starlette.py index a3c289590b..bf89729b35 100644 --- a/tests/integrations/starlette/test_starlette.py +++ b/tests/integrations/starlette/test_starlette.py @@ -13,6 +13,7 @@ from sentry_sdk import capture_message, get_baggage, get_traceparent from sentry_sdk.integrations.asgi import SentryAsgiMiddleware +from sentry_sdk.integrations.logging import LoggingIntegration from sentry_sdk.integrations.starlette import ( StarletteIntegration, StarletteRequestExtractor, @@ -943,7 +944,9 @@ def test_active_thread_id(sentry_init, capture_envelopes, teardown_profiling, en def test_original_request_not_scrubbed(sentry_init, capture_events): - sentry_init(integrations=[StarletteIntegration()]) + sentry_init( + integrations=[StarletteIntegration(), LoggingIntegration(event_level="ERROR")] + ) events = capture_events() diff --git a/tests/test_logs.py b/tests/test_logs.py index 01fe574726..584fdcc608 100644 --- a/tests/test_logs.py +++ b/tests/test_logs.py @@ -331,7 +331,10 @@ def test_logging_errors(sentry_init, capture_envelopes): """ The python logger module should be able to log errors without erroring """ - sentry_init(_experiments={"enable_logs": True}) + sentry_init( + _experiments={"enable_logs": True}, + integrations=[LoggingIntegration(event_level="ERROR")], + ) envelopes = capture_envelopes() python_logger = logging.Logger("test-logger") diff --git a/tests/test_scrubber.py b/tests/test_scrubber.py index ee209da4b1..cc99411778 100644 --- a/tests/test_scrubber.py +++ b/tests/test_scrubber.py @@ -2,6 +2,7 @@ import logging from sentry_sdk import capture_exception, capture_event, start_span +from sentry_sdk.integrations.logging import LoggingIntegration from sentry_sdk.utils import event_from_exception from sentry_sdk.scrubber import EventScrubber from tests.conftest import ApproxDict @@ -119,7 +120,10 @@ def test_stack_var_scrubbing(sentry_init, capture_events): def test_breadcrumb_extra_scrubbing(sentry_init, capture_events): - sentry_init(max_breadcrumbs=2) + sentry_init( + max_breadcrumbs=2, + integrations=[LoggingIntegration(event_level="ERROR")], + ) events = capture_events() logger.info("breadcrumb 1", extra=dict(foo=1, password="secret")) logger.info("breadcrumb 2", extra=dict(bar=2, auth="secret")) From 571ff3bc797b24cfdfe9b9ed189774390e71926d Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Wed, 16 Apr 2025 12:58:21 +0200 Subject: [PATCH 229/244] (Partially) fix POTel CI (#4307) Porting stuff from master and other fixes * add correct `event_level` to new logging tests (on `potel-base`, we don't capture logging errors by default so this has to be set explicitly) * add compat for `start_transaction` * re-enable an old test **Note:** This still leaves one failing threading test, will address that separately --- sentry_sdk/tracing.py | 6 +++ tests/integrations/logging/test_logging.py | 10 ++++- .../integrations/threading/test_threading.py | 3 +- tests/opentelemetry/test_compat.py | 45 +++++++++++++++++++ 4 files changed, 60 insertions(+), 4 deletions(-) diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index c56a7e729c..00fe816e8f 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -183,16 +183,22 @@ def __init__( only_if_parent=False, # type: bool parent_span=None, # type: Optional[Span] otel_span=None, # type: Optional[OtelSpan] + span=None, # type: Optional[Span] ): # type: (...) 
-> None
         """
         If otel_span is passed explicitly, just acts as a proxy.
 
+        If span is passed explicitly, use it. The only purpose of this param
+        is backwards compatibility with start_transaction(transaction=...).
+
         If only_if_parent is True, just return an INVALID_SPAN
         and avoid instrumentation if there's no active parent span.
         """
         if otel_span is not None:
             self._otel_span = otel_span
+        elif span is not None:
+            self._otel_span = span._otel_span
         else:
             skip_span = False
             if only_if_parent and parent_span is None:
diff --git a/tests/integrations/logging/test_logging.py b/tests/integrations/logging/test_logging.py
index e1b24a9853..e54fd829f1 100644
--- a/tests/integrations/logging/test_logging.py
+++ b/tests/integrations/logging/test_logging.py
@@ -296,7 +296,10 @@ def test_ignore_logger_wildcard(sentry_init, capture_events):
 
 def test_logging_dictionary_interpolation(sentry_init, capture_events):
     """Here we test an entire dictionary being interpolated into the log message."""
-    sentry_init(integrations=[LoggingIntegration()], default_integrations=False)
+    sentry_init(
+        integrations=[LoggingIntegration(event_level=logging.ERROR)],
+        default_integrations=False,
+    )
     events = capture_events()
 
     logger.error("this is a log with a dictionary %s", {"foo": "bar"})
@@ -312,7 +315,10 @@ def test_logging_dictionary_args(sentry_init, capture_events):
     """Here we test items from a dictionary being interpolated into the log message."""
-    sentry_init(integrations=[LoggingIntegration()], default_integrations=False)
+    sentry_init(
+        integrations=[LoggingIntegration(event_level=logging.ERROR)],
+        default_integrations=False,
+    )
     events = capture_events()
 
     logger.error(
diff --git a/tests/integrations/threading/test_threading.py b/tests/integrations/threading/test_threading.py
index b1770874e5..11b4ee5301 100644
--- a/tests/integrations/threading/test_threading.py
+++ b/tests/integrations/threading/test_threading.py
@@ -104,7 +104,6 @@ def double(number):
     assert len(event["spans"]) == 0
 
 
-@pytest.mark.skip(reason="Temporarily disable to release SDK 2.0a1.")
 def test_circular_references(sentry_init, request):
     sentry_init(default_integrations=False, integrations=[ThreadingIntegration()])
 
@@ -232,7 +231,7 @@ def do_some_work(number):
 
     threads = []
 
-    with sentry_sdk.start_transaction(op="outer-trx"):
+    with sentry_sdk.start_span(op="outer-trx"):
         for number in range(5):
             with sentry_sdk.start_span(
                 op=f"outer-submit-{number}", name="Thread: main"
diff --git a/tests/opentelemetry/test_compat.py b/tests/opentelemetry/test_compat.py
index 1ae73494cd..381d9ad22e 100644
--- a/tests/opentelemetry/test_compat.py
+++ b/tests/opentelemetry/test_compat.py
@@ -1,4 +1,5 @@
 import sentry_sdk
+from sentry_sdk.tracing import Transaction
 
 
 def test_transaction_name_span_description_compat(
@@ -52,3 +53,47 @@ def test_transaction_name_span_description_compat(
     assert span["op"] == "span-op"
     assert span["data"]["sentry.op"] == "span-op"
     assert span["data"]["sentry.description"] == "span-desc"
+
+
+def test_start_transaction_compat(
+    sentry_init,
+    capture_events,
+):
+    sentry_init(traces_sample_rate=1.0)
+
+    events = capture_events()
+
+    with sentry_sdk.start_transaction(
+        name="trx-name",
+        op="trx-op",
+    ):
+        ...
+ + transaction = events[0] + assert transaction["transaction"] == "trx-name" + assert transaction["contexts"]["trace"]["op"] == "trx-op" + assert transaction["contexts"]["trace"]["data"]["sentry.op"] == "trx-op" + assert transaction["contexts"]["trace"]["data"]["sentry.name"] == "trx-name" + assert "sentry.description" not in transaction["contexts"]["trace"]["data"] + + +def test_start_transaction_with_explicit_transaction_compat( + sentry_init, + capture_events, +): + """It should still be possible to provide a ready-made Transaction to start_transaction.""" + sentry_init(traces_sample_rate=1.0) + + events = capture_events() + + transaction = Transaction(name="trx-name", op="trx-op") + + with sentry_sdk.start_transaction(transaction=transaction): + pass + + transaction = events[0] + assert transaction["transaction"] == "trx-name" + assert transaction["contexts"]["trace"]["op"] == "trx-op" + assert transaction["contexts"]["trace"]["data"]["sentry.op"] == "trx-op" + assert transaction["contexts"]["trace"]["data"]["sentry.name"] == "trx-name" + assert "sentry.description" not in transaction["contexts"]["trace"]["data"] From a45c4f1839d5eb70e6598810d8403608d8510ae0 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Thu, 24 Apr 2025 12:57:24 +0200 Subject: [PATCH 230/244] Set SDK version (#4322) For testing purposes I set the next major version of the SDK. --- docs/conf.py | 2 +- sentry_sdk/consts.py | 2 +- setup.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index c8242bbfb0..5c9477b78d 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -33,7 +33,7 @@ copyright = "2019-{}, Sentry Team and Contributors".format(datetime.now().year) author = "Sentry Team and Contributors" -release = "2.26.1" +release = "3.0.0" version = ".".join(release.split(".")[:2]) # The short X.Y version. 
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index ae7d69aa8c..40fc145558 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -985,4 +985,4 @@ def _get_default_options(): del _get_default_options -VERSION = "2.26.1" +VERSION = "3.0.0" diff --git a/setup.py b/setup.py index f7b6255387..c6c98eb8f6 100644 --- a/setup.py +++ b/setup.py @@ -21,7 +21,7 @@ def get_file_text(file_name): setup( name="sentry-sdk", - version="2.26.1", + version="3.0.0", author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", From 404695160e38339c1a836adb3c3a665c9577d28e Mon Sep 17 00:00:00 2001 From: Daniel Szoke <7881302+szokeasaurusrex@users.noreply.github.com> Date: Thu, 24 Apr 2025 13:00:13 +0200 Subject: [PATCH 231/244] ref(scope): Properly type `Scope.root_span` (#4273) Currently, this property has type `Any`, but it can now be changed to `Optional[Span]` Depends on: - #4263 --- sentry_sdk/integrations/huey.py | 16 ++++++++++------ sentry_sdk/scope.py | 3 +-- 2 files changed, 11 insertions(+), 8 deletions(-) diff --git a/sentry_sdk/integrations/huey.py b/sentry_sdk/integrations/huey.py index 720d38f8e2..1d1c498843 100644 --- a/sentry_sdk/integrations/huey.py +++ b/sentry_sdk/integrations/huey.py @@ -111,11 +111,13 @@ def _capture_exception(exc_info): # type: (ExcInfo) -> None scope = sentry_sdk.get_current_scope() - if exc_info[0] in HUEY_CONTROL_FLOW_EXCEPTIONS: - scope.root_span.set_status(SPANSTATUS.ABORTED) - return + if scope.root_span is not None: + if exc_info[0] in HUEY_CONTROL_FLOW_EXCEPTIONS: + scope.root_span.set_status(SPANSTATUS.ABORTED) + return + + scope.root_span.set_status(SPANSTATUS.INTERNAL_ERROR) - scope.root_span.set_status(SPANSTATUS.INTERNAL_ERROR) event, hint = event_from_exception( exc_info, client_options=sentry_sdk.get_client().options, @@ -136,8 +138,10 @@ def _sentry_execute(*args, **kwargs): exc_info = sys.exc_info() _capture_exception(exc_info) reraise(*exc_info) - else: - sentry_sdk.get_current_scope().root_span.set_status(SPANSTATUS.OK) + + root_span = sentry_sdk.get_current_scope().root_span + if root_span is not None: + root_span.set_status(SPANSTATUS.OK) return result diff --git a/sentry_sdk/scope.py b/sentry_sdk/scope.py index 1f3162605f..5aca30d62d 100644 --- a/sentry_sdk/scope.py +++ b/sentry_sdk/scope.py @@ -692,8 +692,7 @@ def fingerprint(self, value): @property def root_span(self): - # type: () -> Any - # would be type: () -> Optional[Span], see https://github.com/python/mypy/issues/3004 + # type: () -> Optional[Span] """Return the root span in the scope, if any.""" # there is no span/transaction on the scope From d4b9d5038f22bead7aa29b49196d08e19dea08cc Mon Sep 17 00:00:00 2001 From: Daniel Szoke <7881302+szokeasaurusrex@users.noreply.github.com> Date: Thu, 24 Apr 2025 13:01:25 +0200 Subject: [PATCH 232/244] ref: Simplify `Scope.root_span` logic (#4274) Depends on: - #4273 - #4263 --- sentry_sdk/scope.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/sentry_sdk/scope.py b/sentry_sdk/scope.py index 5aca30d62d..975ac6fe04 100644 --- a/sentry_sdk/scope.py +++ b/sentry_sdk/scope.py @@ -694,16 +694,9 @@ def fingerprint(self, value): def root_span(self): # type: () -> Optional[Span] """Return the root span in the scope, if any.""" - - # there is no span/transaction on the scope if self._span is None: return None - # there is an orphan span on the scope - if self._span.root_span is None: - return None - # there is either a root span (which is its own root - # span) or a 
non-orphan span on the scope return self._span.root_span def set_transaction_name(self, name, source=None): From a7e24d9cc612ab79f479d8d438ee032c0a60821c Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Thu, 24 Apr 2025 14:45:00 +0200 Subject: [PATCH 233/244] Fix `propagate_scope=False` in `ThreadingIntegration` (#4310) `ThreadingIntegration` can optionally **NOT** propagate scope data to threads (`propagate_scope=False`). In that case, in POTel we were wrapping the thread's task in an `isolation_scope()`: ```python with sentry_sdk.isolation_scope() as scope: return _run_old_run_func() ``` But as this forks the currently active isolation scope, the thread effectively gets all scope data from the parent isolation scope -- so the scope is actually propagated to the thread, even though it shouldn't be since `propagate_scope=False`. ~We effectively need some way to give the thread a clear isolation scope instead. In this PR, I'm just clearing the forked iso scope, but I'm not sure if this is good enough and if something doesn't need to be done on the OTel side too.~ ~Another option would be to set the iso/current scopes to the initial, empty iso/current scopes instead, before running the thread's target function.~ UPDATE: we're just instantiating new scopes now Another change is that in OTel, the spans in the threads, now without a parent, automatically get promoted to transactions. (On master they'd just be orphaned spans, so they wouldn't be taken into account at all.) We probably need to instruct folks to add `only_if_parent` if they don't want this to happen. --------- Co-authored-by: Neel Shah --- sentry_sdk/__init__.py | 4 +- sentry_sdk/integrations/threading.py | 17 ++++---- sentry_sdk/integrations/wsgi.py | 2 +- sentry_sdk/opentelemetry/scope.py | 8 ++-- .../integrations/threading/test_threading.py | 40 ++++++++++++------- 5 files changed, 41 insertions(+), 30 deletions(-) diff --git a/sentry_sdk/__init__.py b/sentry_sdk/__init__.py index 63d9f03323..1529de592c 100644 --- a/sentry_sdk/__init__.py +++ b/sentry_sdk/__init__.py @@ -1,4 +1,6 @@ -from sentry_sdk.scope import Scope +# TODO-neel scope switch +# TODO-neel avoid duplication between api and __init__ +from sentry_sdk.opentelemetry.scope import PotelScope as Scope from sentry_sdk.transport import Transport, HttpTransport from sentry_sdk.client import Client diff --git a/sentry_sdk/integrations/threading.py b/sentry_sdk/integrations/threading.py index 660cd2d2f8..8d0bb69f9d 100644 --- a/sentry_sdk/integrations/threading.py +++ b/sentry_sdk/integrations/threading.py @@ -4,6 +4,8 @@ from threading import Thread, current_thread import sentry_sdk +from sentry_sdk import Scope +from sentry_sdk.scope import ScopeType from sentry_sdk.integrations import Integration from sentry_sdk.utils import ( event_from_exception, @@ -17,7 +19,6 @@ from typing import Any from typing import TypeVar from typing import Callable - from typing import Optional from sentry_sdk._types import ExcInfo @@ -75,8 +76,8 @@ def sentry_start(self, *a, **kw): isolation_scope = sentry_sdk.get_isolation_scope().fork() current_scope = sentry_sdk.get_current_scope().fork() else: - isolation_scope = None - current_scope = None + isolation_scope = Scope(ty=ScopeType.ISOLATION) + current_scope = Scope(ty=ScopeType.CURRENT) # Patching instance methods in `start()` creates a reference cycle if # done in a naive way. 
See @@ -98,7 +99,7 @@ def sentry_start(self, *a, **kw): def _wrap_run(isolation_scope_to_use, current_scope_to_use, old_run_func): - # type: (Optional[sentry_sdk.Scope], Optional[sentry_sdk.Scope], F) -> F + # type: (sentry_sdk.Scope, sentry_sdk.Scope, F) -> F @wraps(old_run_func) def run(*a, **kw): # type: (*Any, **Any) -> Any @@ -110,12 +111,8 @@ def _run_old_run_func(): except Exception: reraise(*_capture_exception()) - if isolation_scope_to_use is not None and current_scope_to_use is not None: - with sentry_sdk.use_isolation_scope(isolation_scope_to_use): - with sentry_sdk.use_scope(current_scope_to_use): - return _run_old_run_func() - else: - with sentry_sdk.isolation_scope(): + with sentry_sdk.use_isolation_scope(isolation_scope_to_use): + with sentry_sdk.use_scope(current_scope_to_use): return _run_old_run_func() return run # type: ignore diff --git a/sentry_sdk/integrations/wsgi.py b/sentry_sdk/integrations/wsgi.py index 85983b18c4..88708d6080 100644 --- a/sentry_sdk/integrations/wsgi.py +++ b/sentry_sdk/integrations/wsgi.py @@ -254,7 +254,7 @@ class _ScopedResponse: __slots__ = ("_response", "_scope") def __init__(self, scope, response): - # type: (sentry_sdk.scope.Scope, Iterator[bytes]) -> None + # type: (sentry_sdk.Scope, Iterator[bytes]) -> None self._scope = scope self._response = response diff --git a/sentry_sdk/opentelemetry/scope.py b/sentry_sdk/opentelemetry/scope.py index f12adc29f0..4db5e288e3 100644 --- a/sentry_sdk/opentelemetry/scope.py +++ b/sentry_sdk/opentelemetry/scope.py @@ -175,7 +175,7 @@ def setup_scope_context_management(): @contextmanager def isolation_scope(): - # type: () -> Generator[Scope, None, None] + # type: () -> Generator[PotelScope, None, None] context = set_value(SENTRY_FORK_ISOLATION_SCOPE_KEY, True) token = attach(context) try: @@ -186,7 +186,7 @@ def isolation_scope(): @contextmanager def new_scope(): - # type: () -> Generator[Scope, None, None] + # type: () -> Generator[PotelScope, None, None] token = attach(get_current()) try: yield PotelScope.get_current_scope() @@ -196,7 +196,7 @@ def new_scope(): @contextmanager def use_scope(scope): - # type: (Scope) -> Generator[Scope, None, None] + # type: (PotelScope) -> Generator[PotelScope, None, None] context = set_value(SENTRY_USE_CURRENT_SCOPE_KEY, scope) token = attach(context) @@ -208,7 +208,7 @@ def use_scope(scope): @contextmanager def use_isolation_scope(isolation_scope): - # type: (Scope) -> Generator[Scope, None, None] + # type: (PotelScope) -> Generator[PotelScope, None, None] context = set_value(SENTRY_USE_ISOLATION_SCOPE_KEY, isolation_scope) token = attach(context) diff --git a/tests/integrations/threading/test_threading.py b/tests/integrations/threading/test_threading.py index 11b4ee5301..9de9a4de47 100644 --- a/tests/integrations/threading/test_threading.py +++ b/tests/integrations/threading/test_threading.py @@ -232,7 +232,7 @@ def do_some_work(number): threads = [] with sentry_sdk.start_span(op="outer-trx"): - for number in range(5): + for number in range(2): with sentry_sdk.start_span( op=f"outer-submit-{number}", name="Thread: main" ): @@ -243,32 +243,44 @@ def do_some_work(number): for t in threads: t.join() - (event,) = events if propagate_scope: + # The children spans from the threads become parts of the existing span + # tree since we propagated the scope + assert len(events) == 1 + (event,) = events + assert render_span_tree(event) == dedent( """\ - op="outer-trx": description=null - op="outer-submit-0": description="Thread: main" - op="inner-run-0": description="Thread: 
child-0" - op="outer-submit-1": description="Thread: main" - - op="inner-run-1": description="Thread: child-1" - - op="outer-submit-2": description="Thread: main" - - op="inner-run-2": description="Thread: child-2" - - op="outer-submit-3": description="Thread: main" - - op="inner-run-3": description="Thread: child-3" - - op="outer-submit-4": description="Thread: main" - - op="inner-run-4": description="Thread: child-4"\ + - op="inner-run-1": description="Thread: child-1"\ """ ) elif not propagate_scope: - assert render_span_tree(event) == dedent( + # The spans from the threads become their own root spans/transactions + # as the connection to the parent span was severed when the scope was + # cleared + assert len(events) == 3 + (event1, event2, event3) = sorted(events, key=render_span_tree) + + assert render_span_tree(event1) == dedent( + """\ + - op="inner-run-0": description=null\ +""" + ) + assert render_span_tree(event2) == dedent( + """\ + - op="inner-run-1": description=null\ +""" + ) + + assert render_span_tree(event3) == dedent( """\ - op="outer-trx": description=null - op="outer-submit-0": description="Thread: main" - - op="outer-submit-1": description="Thread: main" - - op="outer-submit-2": description="Thread: main" - - op="outer-submit-3": description="Thread: main" - - op="outer-submit-4": description="Thread: main"\ + - op="outer-submit-1": description="Thread: main"\ """ ) From 6d2e9888dde46fc87fb2b0a2d4b34df06c6e33fc Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Thu, 24 Apr 2025 14:59:59 +0200 Subject: [PATCH 234/244] Test Common w/ multiple OTel versions & add compat with old OTel (#4312) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit With the switch to OTel, the Common test suite is now dependent on an otel package, so it technically fits the toxgen usecase. By letting toxgen take care of it, we're making sure we're always testing a good range of otel versions, including the oldest one (to catch regressions) and the newest one (to catch incompatibilities early). Couple things surfaced in terms of incompatibility with older versions: - Some semantic attributes we're using weren't there from the get go https://github.com/open-telemetry/opentelemetry-python/commit/495d70545f84cf0695c4c35aef4b47354f4a4cfe. Changed the code that uses them to handle failure. - The signature of `span.set_status()` changed at some point https://github.com/open-telemetry/opentelemetry-python/commit/6e282d27e5a7fa337322dda154fe6eecf64380f0. Added a compat version of `set_status()` for older otel. 
Also included: - removing the `opentelemetry-experimental` extra (not used anymore) - ❗ switching to using `opentelemetry-sdk` instead of `opentelemetry-distro` -- the `distro` only seems to [be setting up some defaults](https://github.com/open-telemetry/opentelemetry-python-contrib/blob/8390db35ae2062c09d4d74a08d310c7bde1912c4/opentelemetry-distro/src/opentelemetry/distro/__init__.py) that we're not using Closes https://github.com/getsentry/sentry-python/issues/3241 --- scripts/populate_tox/README.md | 35 ++++++++++++++----- scripts/populate_tox/config.py | 12 +++++++ scripts/populate_tox/populate_tox.py | 31 ++++++++++------ scripts/populate_tox/tox.jinja | 12 ------- sentry_sdk/integrations/__init__.py | 1 + sentry_sdk/opentelemetry/utils.py | 8 ++++- sentry_sdk/tracing.py | 11 ++++-- setup.py | 3 +- .../integrations/threading/test_threading.py | 9 ++++- tests/opentelemetry/test_utils.py | 29 +++++++++++++++ tox.ini | 32 ++++++++++------- 11 files changed, 133 insertions(+), 50 deletions(-) diff --git a/scripts/populate_tox/README.md b/scripts/populate_tox/README.md index c9a3b67ba0..39bf627ea1 100644 --- a/scripts/populate_tox/README.md +++ b/scripts/populate_tox/README.md @@ -18,6 +18,7 @@ then determining which versions make sense to test to get good coverage. The lowest supported and latest version of a framework are always tested, with a number of releases in between: + - If the package has majors, we pick the highest version of each major. For the latest major, we also pick the lowest version in that major. - If the package doesn't have multiple majors, we pick two versions in between @@ -35,7 +36,8 @@ the main package (framework, library) to test with; any additional test dependencies, optionally gated behind specific conditions; and optionally the Python versions to test on. -Constraints are defined using the format specified below. The following sections describe each key. +Constraints are defined using the format specified below. The following sections +describe each key. ``` integration_name: { @@ -46,6 +48,7 @@ integration_name: { }, "python": python_version_specifier, "include": package_version_specifier, + "test_on_all_python_versions": bool, } ``` @@ -68,11 +71,12 @@ The test dependencies of the test suite. They're defined as a dictionary of in the package list of a rule will be installed as long as the rule applies. `rule`s are predefined. Each `rule` must be one of the following: - - `*`: packages will be always installed - - a version specifier on the main package (e.g. `<=0.32`): packages will only - be installed if the main package falls into the version bounds specified - - specific Python version(s) in the form `py3.8,py3.9`: packages will only be - installed if the Python version matches one from the list + +- `*`: packages will be always installed +- a version specifier on the main package (e.g. `<=0.32`): packages will only + be installed if the main package falls into the version bounds specified +- specific Python version(s) in the form `py3.8,py3.9`: packages will only be + installed if the Python version matches one from the list Rules can be used to specify version bounds on older versions of the main package's dependencies, for example. If e.g. Flask tests generally need @@ -101,6 +105,7 @@ Python versions, you can say: ... } ``` + This key is optional. ### `python` @@ -145,7 +150,6 @@ The `include` key can also be used to exclude a set of specific versions by usin `!=` version specifiers. 
For example, the Starlite restriction above could equivalently be expressed like so: - ```python "starlite": { "include": "!=2.0.0a1,!=2.0.0a2", @@ -153,6 +157,19 @@ be expressed like so: } ``` +### `test_on_all_python_versions` + +By default, the script will cherry-pick a few Python versions to test each +integration on. If you want a test suite to run on all supported Python versions +instead, set `test_on_all_python_versions` to `True`. + +```python +"common": { + # The common test suite should run on all Python versions + "test_on_all_python_versions": True, + ... +} +``` ## How-Tos @@ -176,7 +193,8 @@ A handful of integration test suites are still hardcoded. The goal is to migrate them all to `populate_tox.py` over time. 1. Remove the integration from the `IGNORE` list in `populate_tox.py`. -2. Remove the hardcoded entries for the integration from the `envlist` and `deps` sections of `tox.jinja`. +2. Remove the hardcoded entries for the integration from the `envlist` and `deps` + sections of `tox.jinja`. 3. Run `scripts/generate-test-files.sh`. 4. Run the test suite, either locally or by creating a PR. 5. Address any test failures that happen. @@ -185,6 +203,7 @@ You might have to introduce additional version bounds on the dependencies of the package. Try to determine the source of the failure and address it. Common scenarios: + - An old version of the tested package installs a dependency without defining an upper version bound on it. A new version of the dependency is installed that is incompatible with the package. In this case you need to determine which diff --git a/scripts/populate_tox/config.py b/scripts/populate_tox/config.py index 2b52c980dc..9019df4271 100644 --- a/scripts/populate_tox/config.py +++ b/scripts/populate_tox/config.py @@ -29,6 +29,18 @@ "clickhouse_driver": { "package": "clickhouse-driver", }, + "common": { + "package": "opentelemetry-sdk", + "test_on_all_python_versions": True, + "deps": { + "*": ["pytest", "pytest-asyncio"], + # See https://github.com/pytest-dev/pytest/issues/9621 + # and https://github.com/pytest-dev/pytest-forked/issues/67 + # for justification of the upper bound on pytest + "py3.7": ["pytest<7.0.0"], + "py3.8": ["hypothesis"], + }, + }, "cohere": { "package": "cohere", "python": ">=3.9", diff --git a/scripts/populate_tox/populate_tox.py b/scripts/populate_tox/populate_tox.py index 11ea94c0f4..5295480cd2 100644 --- a/scripts/populate_tox/populate_tox.py +++ b/scripts/populate_tox/populate_tox.py @@ -61,7 +61,6 @@ "asgi", "aws_lambda", "cloud_resource_context", - "common", "gevent", "opentelemetry", "potel", @@ -348,22 +347,28 @@ def supported_python_versions( return supported -def pick_python_versions_to_test(python_versions: list[Version]) -> list[Version]: +def pick_python_versions_to_test( + python_versions: list[Version], test_all: bool = False +) -> list[Version]: """ Given a list of Python versions, pick those that make sense to test on. Currently, this is the oldest, the newest, and the second newest Python version. 
""" - filtered_python_versions = { - python_versions[0], - } + if test_all: + filtered_python_versions = python_versions - filtered_python_versions.add(python_versions[-1]) - try: - filtered_python_versions.add(python_versions[-2]) - except IndexError: - pass + else: + filtered_python_versions = { + python_versions[0], + } + + filtered_python_versions.add(python_versions[-1]) + try: + filtered_python_versions.add(python_versions[-2]) + except IndexError: + pass return sorted(filtered_python_versions) @@ -517,6 +522,9 @@ def _add_python_versions_to_release( time.sleep(PYPI_COOLDOWN) # give PYPI some breathing room + test_on_all_python_versions = ( + TEST_SUITE_CONFIG[integration].get("test_on_all_python_versions") or False + ) target_python_versions = TEST_SUITE_CONFIG[integration].get("python") if target_python_versions: target_python_versions = SpecifierSet(target_python_versions) @@ -525,7 +533,8 @@ def _add_python_versions_to_release( supported_python_versions( determine_python_versions(release_pypi_data), target_python_versions, - ) + ), + test_all=test_on_all_python_versions, ) release.rendered_python_versions = _render_python_versions(release.python_versions) diff --git a/scripts/populate_tox/tox.jinja b/scripts/populate_tox/tox.jinja index bec77445e4..5ebb02827c 100644 --- a/scripts/populate_tox/tox.jinja +++ b/scripts/populate_tox/tox.jinja @@ -17,9 +17,6 @@ requires = # This version introduced using pip 24.1 which does not work with older Celery and HTTPX versions. virtualenv<20.26.3 envlist = - # === Common === - {py3.7,py3.8,py3.9,py3.10,py3.11,py3.12,py3.13}-common - # === Gevent === {py3.8,py3.10,py3.11,py3.12}-gevent @@ -157,15 +154,6 @@ deps = linters: -r requirements-linting.txt linters: werkzeug<2.3.0 - # === Common === - py3.8-common: hypothesis - common: pytest-asyncio - # See https://github.com/pytest-dev/pytest/issues/9621 - # and https://github.com/pytest-dev/pytest-forked/issues/67 - # for justification of the upper bound on pytest - py3.7-common: pytest<7.0.0 - {py3.8,py3.9,py3.10,py3.11,py3.12,py3.13}-common: pytest - # === Gevent === {py3.7,py3.8,py3.9,py3.10,py3.11}-gevent: gevent>=22.10.0, <22.11.0 {py3.12}-gevent: gevent diff --git a/sentry_sdk/integrations/__init__.py b/sentry_sdk/integrations/__init__.py index eb4f8787be..f2d1a28522 100644 --- a/sentry_sdk/integrations/__init__.py +++ b/sentry_sdk/integrations/__init__.py @@ -131,6 +131,7 @@ def iter_default_integrations(with_auto_enabling_integrations): "celery": (4, 4, 7), "chalice": (1, 16, 0), "clickhouse_driver": (0, 2, 0), + "common": (1, 4, 0), # opentelemetry-sdk "cohere": (5, 4, 0), "django": (2, 0), "dramatiq": (1, 9), diff --git a/sentry_sdk/opentelemetry/utils.py b/sentry_sdk/opentelemetry/utils.py index ade9858855..aa10e849ac 100644 --- a/sentry_sdk/opentelemetry/utils.py +++ b/sentry_sdk/opentelemetry/utils.py @@ -282,7 +282,13 @@ def infer_status_from_attributes(span_attributes): def get_http_status_code(span_attributes): # type: (Mapping[str, str | bool | int | float | Sequence[str] | Sequence[bool] | Sequence[int] | Sequence[float]]) -> Optional[int] - http_status = span_attributes.get(SpanAttributes.HTTP_RESPONSE_STATUS_CODE) + try: + http_status = span_attributes.get(SpanAttributes.HTTP_RESPONSE_STATUS_CODE) + except AttributeError: + # HTTP_RESPONSE_STATUS_CODE was added in 1.21, so if we're on an older + # OTel version SpanAttributes.HTTP_RESPONSE_STATUS_CODE will throw an + # AttributeError + http_status = None if http_status is None: # Fall back to the deprecated attribute diff --git 
a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 00fe816e8f..a235448558 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -11,8 +11,9 @@ get_current_span, INVALID_SPAN, ) -from opentelemetry.trace.status import StatusCode +from opentelemetry.trace.status import Status, StatusCode from opentelemetry.sdk.trace import ReadableSpan +from opentelemetry.version import __version__ as otel_version import sentry_sdk from sentry_sdk.consts import ( @@ -41,6 +42,7 @@ from sentry_sdk.utils import ( _serialize_span_attribute, get_current_thread_meta, + parse_version, should_be_treated_as_error, ) @@ -70,6 +72,8 @@ from sentry_sdk.tracing_utils import Baggage +_OTEL_VERSION = parse_version(otel_version) + tracer = otel_trace.get_tracer(__name__) @@ -531,7 +535,10 @@ def set_status(self, status): otel_status = StatusCode.ERROR otel_description = status - self._otel_span.set_status(otel_status, otel_description) + if _OTEL_VERSION is None or _OTEL_VERSION >= (1, 12, 0): + self._otel_span.set_status(otel_status, otel_description) + else: + self._otel_span.set_status(Status(otel_status, otel_description)) def set_measurement(self, name, value, unit=""): # type: (str, float, MeasurementUnit) -> None diff --git a/setup.py b/setup.py index c6c98eb8f6..a1b594c9c8 100644 --- a/setup.py +++ b/setup.py @@ -41,7 +41,7 @@ def get_file_text(file_name): install_requires=[ "urllib3>=1.26.11", "certifi", - "opentelemetry-distro>=0.35b0", # XXX check lower bound + "opentelemetry-sdk>=1.4.0", ], extras_require={ "aiohttp": ["aiohttp>=3.5"], @@ -70,7 +70,6 @@ def get_file_text(file_name): "openai": ["openai>=1.0.0", "tiktoken>=0.3.0"], "openfeature": ["openfeature-sdk>=0.7.1"], "opentelemetry": ["opentelemetry-distro>=0.35b0"], - "opentelemetry-experimental": ["opentelemetry-distro"], "pure-eval": ["pure_eval", "executing", "asttokens"], "pymongo": ["pymongo>=3.1"], "pyspark": ["pyspark>=2.4.4"], diff --git a/tests/integrations/threading/test_threading.py b/tests/integrations/threading/test_threading.py index 9de9a4de47..4ab742ff1f 100644 --- a/tests/integrations/threading/test_threading.py +++ b/tests/integrations/threading/test_threading.py @@ -2,12 +2,14 @@ from concurrent import futures from textwrap import dedent from threading import Thread +import sys import pytest import sentry_sdk from sentry_sdk import capture_message from sentry_sdk.integrations.threading import ThreadingIntegration +from sentry_sdk.tracing import _OTEL_VERSION original_start = Thread.start original_run = Thread.run @@ -104,13 +106,18 @@ def double(number): assert len(event["spans"]) == 0 +@pytest.mark.skipif( + sys.version[:3] == "3.8" and (1, 12) <= _OTEL_VERSION < (1, 16), + reason="Fails in CI on 3.8 and specific OTel versions", +) def test_circular_references(sentry_init, request): sentry_init(default_integrations=False, integrations=[ThreadingIntegration()]) - gc.collect() gc.disable() request.addfinalizer(gc.enable) + gc.collect() + class MyThread(Thread): def run(self): pass diff --git a/tests/opentelemetry/test_utils.py b/tests/opentelemetry/test_utils.py index b7bc055d3c..a73efd9b3b 100644 --- a/tests/opentelemetry/test_utils.py +++ b/tests/opentelemetry/test_utils.py @@ -2,6 +2,7 @@ import pytest from opentelemetry.trace import SpanKind, Status, StatusCode +from opentelemetry.version import __version__ as OTEL_VERSION from sentry_sdk.opentelemetry.utils import ( extract_span_data, @@ -9,6 +10,9 @@ span_data_for_db_query, span_data_for_http_method, ) +from sentry_sdk.utils import parse_version + +OTEL_VERSION = 
parse_version(OTEL_VERSION) @pytest.mark.parametrize( @@ -276,6 +280,9 @@ def test_span_data_for_db_query(): { "status": "unavailable", "http_status_code": 503, + # old otel versions won't take the new attribute into account + "status_old": "internal_error", + "http_status_code_old": 502, }, ), ( @@ -290,6 +297,9 @@ def test_span_data_for_db_query(): { "status": "unavailable", "http_status_code": 503, + # old otel versions won't take the new attribute into account + "status_old": "internal_error", + "http_status_code_old": 502, }, ), ( @@ -311,6 +321,7 @@ def test_span_data_for_db_query(): "http.method": "POST", "http.route": "/some/route", "http.response.status_code": 200, + "http.status_code": 200, }, { "status": "ok", @@ -326,6 +337,7 @@ def test_span_data_for_db_query(): "http.method": "POST", "http.route": "/some/route", "http.response.status_code": 401, + "http.status_code": 401, }, { "status": "unauthenticated", @@ -339,6 +351,7 @@ def test_span_data_for_db_query(): "http.method": "POST", "http.route": "/some/route", "http.response.status_code": 418, + "http.status_code": 418, }, { "status": "invalid_argument", @@ -372,4 +385,20 @@ def test_extract_span_status(kind, status, attributes, expected): "status": status, "http_status_code": http_status_code, } + + if ( + OTEL_VERSION < (1, 21) + and "status_old" in expected + and "http_status_code_old" in expected + ): + expected = { + "status": expected["status_old"], + "http_status_code": expected["http_status_code_old"], + } + else: + expected = { + "status": expected["status"], + "http_status_code": expected["http_status_code"], + } + assert result == expected diff --git a/tox.ini b/tox.ini index 1cad653899..53c37ca47e 100644 --- a/tox.ini +++ b/tox.ini @@ -10,16 +10,13 @@ # The file (and all resulting CI YAMLs) then need to be regenerated via # "scripts/generate-test-files.sh". # -# Last generated: 2025-04-17T11:01:25.976599+00:00 +# Last generated: 2025-04-17T12:20:33.943833+00:00 [tox] requires = # This version introduced using pip 24.1 which does not work with older Celery and HTTPX versions. virtualenv<20.26.3 envlist = - # === Common === - {py3.7,py3.8,py3.9,py3.10,py3.11,py3.12,py3.13}-common - # === Gevent === {py3.8,py3.10,py3.11,py3.12}-gevent @@ -136,6 +133,13 @@ envlist = # These come from the populate_tox.py script. Eventually we should move all # integration tests there. + # ~~~ Common ~~~ + {py3.7,py3.8,py3.9}-common-v1.4.1 + {py3.7,py3.8,py3.9,py3.10}-common-v1.13.0 + {py3.7,py3.8,py3.9,py3.10,py3.11}-common-v1.22.0 + {py3.8,py3.9,py3.10,py3.11,py3.12,py3.13}-common-v1.32.1 + + # ~~~ AI ~~~ {py3.9,py3.10,py3.11}-cohere-v5.4.0 {py3.9,py3.11,py3.12}-cohere-v5.9.4 @@ -305,15 +309,6 @@ deps = linters: -r requirements-linting.txt linters: werkzeug<2.3.0 - # === Common === - py3.8-common: hypothesis - common: pytest-asyncio - # See https://github.com/pytest-dev/pytest/issues/9621 - # and https://github.com/pytest-dev/pytest-forked/issues/67 - # for justification of the upper bound on pytest - py3.7-common: pytest<7.0.0 - {py3.8,py3.9,py3.10,py3.11,py3.12,py3.13}-common: pytest - # === Gevent === {py3.7,py3.8,py3.9,py3.10,py3.11}-gevent: gevent>=22.10.0, <22.11.0 {py3.12}-gevent: gevent @@ -494,6 +489,17 @@ deps = # These come from the populate_tox.py script. Eventually we should move all # integration tests there. 
+ # ~~~ Common ~~~ + common-v1.4.1: opentelemetry-sdk==1.4.1 + common-v1.13.0: opentelemetry-sdk==1.13.0 + common-v1.22.0: opentelemetry-sdk==1.22.0 + common-v1.32.1: opentelemetry-sdk==1.32.1 + common: pytest + common: pytest-asyncio + py3.7-common: pytest<7.0.0 + py3.8-common: hypothesis + + # ~~~ AI ~~~ cohere-v5.4.0: cohere==5.4.0 cohere-v5.9.4: cohere==5.9.4 From 40eae25d0ae9d22d711abc26841d6897de2a82fe Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Mon, 28 Apr 2025 14:37:18 +0200 Subject: [PATCH 235/244] ref: Drop `set_measurement` (#4333) Closes https://github.com/getsentry/sentry-python/issues/4332 --------- Co-authored-by: Daniel Szoke <7881302+szokeasaurusrex@users.noreply.github.com> --- MIGRATION_GUIDE.md | 1 + sentry_sdk/__init__.py | 1 - sentry_sdk/_types.py | 41 ---------------- sentry_sdk/ai/monitoring.py | 6 +-- sentry_sdk/api.py | 8 ---- sentry_sdk/opentelemetry/consts.py | 1 - sentry_sdk/opentelemetry/span_processor.py | 4 -- sentry_sdk/opentelemetry/utils.py | 10 +--- sentry_sdk/tracing.py | 12 ----- .../integrations/anthropic/test_anthropic.py | 47 +++++++++---------- tests/integrations/cohere/test_cohere.py | 16 +++---- .../huggingface_hub/test_huggingface_hub.py | 4 +- .../integrations/langchain/test_langchain.py | 7 +-- tests/integrations/openai/test_openai.py | 32 ++++++------- tests/tracing/test_misc.py | 42 +---------------- 15 files changed, 57 insertions(+), 175 deletions(-) diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index d05b617e4c..0a40a659de 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -137,6 +137,7 @@ Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of wh - The `enable_tracing` `init` option has been removed. Configure `traces_sample_rate` directly. - The `propagate_traces` `init` option has been removed. Use `trace_propagation_targets` instead. - The `custom_sampling_context` parameter of `start_transaction` has been removed. Use `attributes` instead to set key-value pairs of data that should be accessible in the traces sampler. Note that span attributes need to conform to the [OpenTelemetry specification](https://opentelemetry.io/docs/concepts/signals/traces/#attributes), meaning only certain types can be set as values. +- `set_measurement` has been removed. - The PyMongo integration no longer sets tags. The data is still accessible via span attributes. - The PyMongo integration doesn't set `operation_ids` anymore. The individual IDs (`operation_id`, `request_id`, `session_id`) are now accessible as separate span attributes. - `sentry_sdk.metrics` and associated metrics APIs have been removed as Sentry no longer accepts metrics data in this form. 
See https://sentry.zendesk.com/hc/en-us/articles/26369339769883-Upcoming-API-Changes-to-Metrics diff --git a/sentry_sdk/__init__.py b/sentry_sdk/__init__.py index 1529de592c..b35c446dc0 100644 --- a/sentry_sdk/__init__.py +++ b/sentry_sdk/__init__.py @@ -36,7 +36,6 @@ "set_context", "set_extra", "set_level", - "set_measurement", "set_tag", "set_tags", "set_user", diff --git a/sentry_sdk/_types.py b/sentry_sdk/_types.py index 9b320966ea..79260e3431 100644 --- a/sentry_sdk/_types.py +++ b/sentry_sdk/_types.py @@ -107,7 +107,6 @@ def substituted_because_contains_sensitive_data(cls): from typing import Callable from typing import Dict from typing import Mapping - from typing import NotRequired from typing import Optional from typing import Type from typing_extensions import Literal, TypedDict @@ -120,45 +119,6 @@ class SDKInfo(TypedDict): # "critical" is an alias of "fatal" recognized by Relay LogLevelStr = Literal["fatal", "critical", "error", "warning", "info", "debug"] - DurationUnit = Literal[ - "nanosecond", - "microsecond", - "millisecond", - "second", - "minute", - "hour", - "day", - "week", - ] - - InformationUnit = Literal[ - "bit", - "byte", - "kilobyte", - "kibibyte", - "megabyte", - "mebibyte", - "gigabyte", - "gibibyte", - "terabyte", - "tebibyte", - "petabyte", - "pebibyte", - "exabyte", - "exbibyte", - ] - - FractionUnit = Literal["ratio", "percent"] - MeasurementUnit = Union[DurationUnit, InformationUnit, FractionUnit, str] - - MeasurementValue = TypedDict( - "MeasurementValue", - { - "value": float, - "unit": NotRequired[Optional[MeasurementUnit]], - }, - ) - Event = TypedDict( "Event", { @@ -180,7 +140,6 @@ class SDKInfo(TypedDict): "level": LogLevelStr, "logentry": Mapping[str, object], "logger": str, - "measurements": dict[str, MeasurementValue], "message": str, "modules": dict[str, str], "monitor_config": Mapping[str, object], diff --git a/sentry_sdk/ai/monitoring.py b/sentry_sdk/ai/monitoring.py index 08b6482da5..2b6a1cdf72 100644 --- a/sentry_sdk/ai/monitoring.py +++ b/sentry_sdk/ai/monitoring.py @@ -106,9 +106,9 @@ def record_token_usage( if ai_pipeline_name: span.set_attribute("ai.pipeline.name", ai_pipeline_name) if prompt_tokens is not None: - span.set_measurement("ai_prompt_tokens_used", value=prompt_tokens) + span.set_attribute("ai.prompt_tokens.used", prompt_tokens) if completion_tokens is not None: - span.set_measurement("ai_completion_tokens_used", value=completion_tokens) + span.set_attribute("ai.completion_tokens.used", completion_tokens) if ( total_tokens is None and prompt_tokens is not None @@ -116,4 +116,4 @@ def record_token_usage( ): total_tokens = prompt_tokens + completion_tokens if total_tokens is not None: - span.set_measurement("ai_total_tokens_used", total_tokens) + span.set_attribute("ai.total_tokens.used", total_tokens) diff --git a/sentry_sdk/api.py b/sentry_sdk/api.py index 2ded31ee48..b8a2498d5d 100644 --- a/sentry_sdk/api.py +++ b/sentry_sdk/api.py @@ -59,7 +59,6 @@ "set_context", "set_extra", "set_level", - "set_measurement", "set_tag", "set_tags", "set_user", @@ -287,13 +286,6 @@ def start_transaction( ) -def set_measurement(name, value, unit=""): - # type: (str, float, sentry_sdk._types.MeasurementUnit) -> None - transaction = get_current_scope().root_span - if transaction is not None: - transaction.set_measurement(name, value, unit) - - def get_current_span(scope=None): # type: (Optional[Scope]) -> Optional[sentry_sdk.tracing.Span] """ diff --git a/sentry_sdk/opentelemetry/consts.py b/sentry_sdk/opentelemetry/consts.py index 
0e3cb54948..7f7afce9e2 100644 --- a/sentry_sdk/opentelemetry/consts.py +++ b/sentry_sdk/opentelemetry/consts.py @@ -26,7 +26,6 @@ class SentrySpanAttribute: DESCRIPTION = "sentry.description" OP = "sentry.op" ORIGIN = "sentry.origin" - MEASUREMENT = "sentry.measurement" TAG = "sentry.tag" NAME = "sentry.name" SOURCE = "sentry.source" diff --git a/sentry_sdk/opentelemetry/span_processor.py b/sentry_sdk/opentelemetry/span_processor.py index 6da616ed87..abfb712a89 100644 --- a/sentry_sdk/opentelemetry/span_processor.py +++ b/sentry_sdk/opentelemetry/span_processor.py @@ -304,10 +304,6 @@ def _common_span_transaction_attributes_as_json(self, span): "timestamp": convert_from_otel_timestamp(span.end_time), } # type: Event - measurements = extract_span_attributes(span, SentrySpanAttribute.MEASUREMENT) - if measurements: - common_json["measurements"] = measurements - tags = extract_span_attributes(span, SentrySpanAttribute.TAG) if tags: common_json["tags"] = tags diff --git a/sentry_sdk/opentelemetry/utils.py b/sentry_sdk/opentelemetry/utils.py index aa10e849ac..b9dbbd5f09 100644 --- a/sentry_sdk/opentelemetry/utils.py +++ b/sentry_sdk/opentelemetry/utils.py @@ -309,15 +309,7 @@ def extract_span_attributes(span, namespace): for attr, value in (span.attributes or {}).items(): if attr.startswith(namespace): key = attr[len(namespace) + 1 :] - - if namespace == SentrySpanAttribute.MEASUREMENT: - value = cast("tuple[str, str]", value) - extracted_attrs[key] = { - "value": float(value[0]), - "unit": value[1], - } - else: - extracted_attrs[key] = value + extracted_attrs[key] = value return extracted_attrs diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 7b8004c8b5..388cf38cef 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -65,7 +65,6 @@ R = TypeVar("R") from sentry_sdk._types import ( - MeasurementUnit, SamplingContext, ) @@ -150,10 +149,6 @@ def finish( # type: (...) 
-> None pass - def set_measurement(self, name, value, unit=""): - # type: (str, float, MeasurementUnit) -> None - pass - def set_context(self, key, value): # type: (str, dict[str, Any]) -> None pass @@ -540,13 +535,6 @@ def set_status(self, status): else: self._otel_span.set_status(Status(otel_status, otel_description)) - def set_measurement(self, name, value, unit=""): - # type: (str, float, MeasurementUnit) -> None - # Stringify value here since OTel expects all seq items to be of one type - self.set_attribute( - f"{SentrySpanAttribute.MEASUREMENT}.{name}", (str(value), unit) - ) - def set_thread(self, thread_id, thread_name): # type: (Optional[int], Optional[str]) -> None if thread_id is not None: diff --git a/tests/integrations/anthropic/test_anthropic.py b/tests/integrations/anthropic/test_anthropic.py index c318331972..5da9b870eb 100644 --- a/tests/integrations/anthropic/test_anthropic.py +++ b/tests/integrations/anthropic/test_anthropic.py @@ -127,9 +127,9 @@ def test_nonstreaming_create_message( assert SPANDATA.AI_INPUT_MESSAGES not in span["data"] assert SPANDATA.AI_RESPONSES not in span["data"] - assert span["measurements"]["ai_prompt_tokens_used"]["value"] == 10 - assert span["measurements"]["ai_completion_tokens_used"]["value"] == 20 - assert span["measurements"]["ai_total_tokens_used"]["value"] == 30 + assert span["data"]["ai.prompt_tokens.used"] == 10 + assert span["data"]["ai.completion_tokens.used"] == 20 + assert span["data"]["ai.total_tokens.used"] == 30 assert span["data"]["ai.streaming"] is False @@ -197,9 +197,9 @@ async def test_nonstreaming_create_message_async( assert SPANDATA.AI_INPUT_MESSAGES not in span["data"] assert SPANDATA.AI_RESPONSES not in span["data"] - assert span["measurements"]["ai_prompt_tokens_used"]["value"] == 10 - assert span["measurements"]["ai_completion_tokens_used"]["value"] == 20 - assert span["measurements"]["ai_total_tokens_used"]["value"] == 30 + assert span["data"]["ai.prompt_tokens.used"] == 10 + assert span["data"]["ai.completion_tokens.used"] == 20 + assert span["data"]["ai.total_tokens.used"] == 30 assert span["data"]["ai.streaming"] is False @@ -299,9 +299,9 @@ def test_streaming_create_message( assert SPANDATA.AI_INPUT_MESSAGES not in span["data"] assert SPANDATA.AI_RESPONSES not in span["data"] - assert span["measurements"]["ai_prompt_tokens_used"]["value"] == 10 - assert span["measurements"]["ai_completion_tokens_used"]["value"] == 30 - assert span["measurements"]["ai_total_tokens_used"]["value"] == 40 + assert span["data"]["ai.prompt_tokens.used"] == 10 + assert span["data"]["ai.completion_tokens.used"] == 30 + assert span["data"]["ai.total_tokens.used"] == 40 assert span["data"]["ai.streaming"] is True @@ -404,9 +404,9 @@ async def test_streaming_create_message_async( assert SPANDATA.AI_INPUT_MESSAGES not in span["data"] assert SPANDATA.AI_RESPONSES not in span["data"] - assert span["measurements"]["ai_prompt_tokens_used"]["value"] == 10 - assert span["measurements"]["ai_completion_tokens_used"]["value"] == 30 - assert span["measurements"]["ai_total_tokens_used"]["value"] == 40 + assert span["data"]["ai.prompt_tokens.used"] == 10 + assert span["data"]["ai.completion_tokens.used"] == 30 + assert span["data"]["ai.total_tokens.used"] == 40 assert span["data"]["ai.streaming"] is True @@ -536,9 +536,9 @@ def test_streaming_create_message_with_input_json_delta( assert SPANDATA.AI_INPUT_MESSAGES not in span["data"] assert SPANDATA.AI_RESPONSES not in span["data"] - assert span["measurements"]["ai_prompt_tokens_used"]["value"] == 366 - 
assert span["measurements"]["ai_completion_tokens_used"]["value"] == 51 - assert span["measurements"]["ai_total_tokens_used"]["value"] == 417 + assert span["data"]["ai.prompt_tokens.used"] == 366 + assert span["data"]["ai.completion_tokens.used"] == 51 + assert span["data"]["ai.total_tokens.used"] == 417 assert span["data"]["ai.streaming"] is True @@ -675,9 +675,9 @@ async def test_streaming_create_message_with_input_json_delta_async( assert SPANDATA.AI_INPUT_MESSAGES not in span["data"] assert SPANDATA.AI_RESPONSES not in span["data"] - assert span["measurements"]["ai_prompt_tokens_used"]["value"] == 366 - assert span["measurements"]["ai_completion_tokens_used"]["value"] == 51 - assert span["measurements"]["ai_total_tokens_used"]["value"] == 417 + assert span["data"]["ai.prompt_tokens.used"] == 366 + assert span["data"]["ai.completion_tokens.used"] == 51 + assert span["data"]["ai.total_tokens.used"] == 417 assert span["data"]["ai.streaming"] is True @@ -822,11 +822,6 @@ def test_add_ai_data_to_span_with_input_json_delta(sentry_init, capture_events): content_blocks=["{'test': 'data',", "'more': 'json'}"], ) - # assert span._data.get("ai.streaming") is True - # assert span._measurements.get("ai_prompt_tokens_used")["value"] == 10 - # assert span._measurements.get("ai_completion_tokens_used")["value"] == 20 - # assert span._measurements.get("ai_total_tokens_used")["value"] == 30 - (event,) = events assert len(event["spans"]) == 1 @@ -836,6 +831,6 @@ def test_add_ai_data_to_span_with_input_json_delta(sentry_init, capture_events): [{"type": "text", "text": "{'test': 'data','more': 'json'}"}] ) assert span["data"]["ai.streaming"] is True - assert span["measurements"]["ai_prompt_tokens_used"]["value"] == 10 - assert span["measurements"]["ai_completion_tokens_used"]["value"] == 20 - assert span["measurements"]["ai_total_tokens_used"]["value"] == 30 + assert span["data"]["ai.prompt_tokens.used"] == 10 + assert span["data"]["ai.completion_tokens.used"] == 20 + assert span["data"]["ai.total_tokens.used"] == 30 diff --git a/tests/integrations/cohere/test_cohere.py b/tests/integrations/cohere/test_cohere.py index ff41ceba11..25d1c30cf4 100644 --- a/tests/integrations/cohere/test_cohere.py +++ b/tests/integrations/cohere/test_cohere.py @@ -64,9 +64,9 @@ def test_nonstreaming_chat( assert "ai.input_messages" not in span["data"] assert "ai.responses" not in span["data"] - assert span["measurements"]["ai_completion_tokens_used"]["value"] == 10 - assert span["measurements"]["ai_prompt_tokens_used"]["value"] == 20 - assert span["measurements"]["ai_total_tokens_used"]["value"] == 30 + assert span["data"]["ai.completion_tokens.used"] == 10 + assert span["data"]["ai.prompt_tokens.used"] == 20 + assert span["data"]["ai.total_tokens.used"] == 30 # noinspection PyTypeChecker @@ -136,9 +136,9 @@ def test_streaming_chat(sentry_init, capture_events, send_default_pii, include_p assert "ai.input_messages" not in span["data"] assert "ai.responses" not in span["data"] - assert span["measurements"]["ai_completion_tokens_used"]["value"] == 10 - assert span["measurements"]["ai_prompt_tokens_used"]["value"] == 20 - assert span["measurements"]["ai_total_tokens_used"]["value"] == 30 + assert span["data"]["ai.completion_tokens.used"] == 10 + assert span["data"]["ai.prompt_tokens.used"] == 20 + assert span["data"]["ai.total_tokens.used"] == 30 def test_bad_chat(sentry_init, capture_events): @@ -200,8 +200,8 @@ def test_embed(sentry_init, capture_events, send_default_pii, include_prompts): else: assert "ai.input_messages" not in 
span["data"] - assert span["measurements"]["ai_prompt_tokens_used"]["value"] == 10 - assert span["measurements"]["ai_total_tokens_used"]["value"] == 10 + assert span["data"]["ai.prompt_tokens.used"] == 10 + assert span["data"]["ai.total_tokens.used"] == 10 def test_span_origin_chat(sentry_init, capture_events): diff --git a/tests/integrations/huggingface_hub/test_huggingface_hub.py b/tests/integrations/huggingface_hub/test_huggingface_hub.py index 17df29c331..9a867e718b 100644 --- a/tests/integrations/huggingface_hub/test_huggingface_hub.py +++ b/tests/integrations/huggingface_hub/test_huggingface_hub.py @@ -74,7 +74,7 @@ def test_nonstreaming_chat_completion( assert "ai.responses" not in span["data"] if details_arg: - assert span["measurements"]["ai_total_tokens_used"]["value"] == 10 + assert span["data"]["ai.total_tokens.used"] == 10 @pytest.mark.parametrize( @@ -133,7 +133,7 @@ def test_streaming_chat_completion( assert "ai.responses" not in span["data"] if details_arg: - assert span["measurements"]["ai_total_tokens_used"]["value"] == 10 + assert span["data"]["ai.total_tokens.used"] == 10 def test_bad_chat_completion(sentry_init, capture_events): diff --git a/tests/integrations/langchain/test_langchain.py b/tests/integrations/langchain/test_langchain.py index f8ab30054d..62f3eac04a 100644 --- a/tests/integrations/langchain/test_langchain.py +++ b/tests/integrations/langchain/test_langchain.py @@ -179,12 +179,13 @@ def test_langchain_agent( assert len(list(x for x in tx["spans"] if x["op"] == "ai.run.langchain")) > 0 if use_unknown_llm_type: - assert "ai_prompt_tokens_used" in chat_spans[0]["measurements"] - assert "ai_total_tokens_used" in chat_spans[0]["measurements"] + assert "ai.prompt_tokens.used" in chat_spans[0]["data"] + assert "ai.total_tokens.used" in chat_spans[0]["data"] else: # important: to avoid double counting, we do *not* measure # tokens used if we have an explicit integration (e.g. 
OpenAI) - assert "measurements" not in chat_spans[0] + assert "ai.prompt_tokens.used" not in chat_spans[0]["data"] + assert "ai.total_tokens.used" not in chat_spans[0]["data"] if send_default_pii and include_prompts: assert "You are very powerful" in chat_spans[0]["data"]["ai.input_messages"] diff --git a/tests/integrations/openai/test_openai.py b/tests/integrations/openai/test_openai.py index 0508d7d056..85ff95f377 100644 --- a/tests/integrations/openai/test_openai.py +++ b/tests/integrations/openai/test_openai.py @@ -89,9 +89,9 @@ def test_nonstreaming_chat_completion( assert "ai.input_messages" not in span["data"] assert "ai.responses" not in span["data"] - assert span["measurements"]["ai_completion_tokens_used"]["value"] == 10 - assert span["measurements"]["ai_prompt_tokens_used"]["value"] == 20 - assert span["measurements"]["ai_total_tokens_used"]["value"] == 30 + assert span["data"]["ai.completion_tokens.used"] == 10 + assert span["data"]["ai.prompt_tokens.used"] == 20 + assert span["data"]["ai.total_tokens.used"] == 30 @pytest.mark.asyncio @@ -131,9 +131,9 @@ async def test_nonstreaming_chat_completion_async( assert "ai.input_messages" not in span["data"] assert "ai.responses" not in span["data"] - assert span["measurements"]["ai_completion_tokens_used"]["value"] == 10 - assert span["measurements"]["ai_prompt_tokens_used"]["value"] == 20 - assert span["measurements"]["ai_total_tokens_used"]["value"] == 30 + assert span["data"]["ai.completion_tokens.used"] == 10 + assert span["data"]["ai.prompt_tokens.used"] == 20 + assert span["data"]["ai.total_tokens.used"] == 30 def tiktoken_encoding_if_installed(): @@ -227,9 +227,9 @@ def test_streaming_chat_completion( try: import tiktoken # type: ignore # noqa # pylint: disable=unused-import - assert span["measurements"]["ai_completion_tokens_used"]["value"] == 2 - assert span["measurements"]["ai_prompt_tokens_used"]["value"] == 1 - assert span["measurements"]["ai_total_tokens_used"]["value"] == 3 + assert span["data"]["ai.completion_tokens.used"] == 2 + assert span["data"]["ai.prompt_tokens.used"] == 1 + assert span["data"]["ai.total_tokens.used"] == 3 except ImportError: pass # if tiktoken is not installed, we can't guarantee token usage will be calculated properly @@ -323,9 +323,9 @@ async def test_streaming_chat_completion_async( try: import tiktoken # type: ignore # noqa # pylint: disable=unused-import - assert span["measurements"]["ai_completion_tokens_used"]["value"] == 2 - assert span["measurements"]["ai_prompt_tokens_used"]["value"] == 1 - assert span["measurements"]["ai_total_tokens_used"]["value"] == 3 + assert span["data"]["ai.completion_tokens.used"] == 2 + assert span["data"]["ai.prompt_tokens.used"] == 1 + assert span["data"]["ai.total_tokens.used"] == 3 except ImportError: pass # if tiktoken is not installed, we can't guarantee token usage will be calculated properly @@ -409,8 +409,8 @@ def test_embeddings_create( else: assert "ai.input_messages" not in span["data"] - assert span["measurements"]["ai_prompt_tokens_used"]["value"] == 20 - assert span["measurements"]["ai_total_tokens_used"]["value"] == 30 + assert span["data"]["ai.prompt_tokens.used"] == 20 + assert span["data"]["ai.total_tokens.used"] == 30 @pytest.mark.asyncio @@ -457,8 +457,8 @@ async def test_embeddings_create_async( else: assert "ai.input_messages" not in span["data"] - assert span["measurements"]["ai_prompt_tokens_used"]["value"] == 20 - assert span["measurements"]["ai_total_tokens_used"]["value"] == 30 + assert span["data"]["ai.prompt_tokens.used"] == 20 + 
assert span["data"]["ai.total_tokens.used"] == 30 @pytest.mark.forked diff --git a/tests/tracing/test_misc.py b/tests/tracing/test_misc.py index 5b0213d6c6..4d85594324 100644 --- a/tests/tracing/test_misc.py +++ b/tests/tracing/test_misc.py @@ -2,7 +2,7 @@ from unittest.mock import MagicMock import sentry_sdk -from sentry_sdk import start_span, set_measurement, get_current_scope +from sentry_sdk import start_span, get_current_scope from sentry_sdk.consts import MATCH_ALL from sentry_sdk.tracing_utils import should_propagate_trace from sentry_sdk.utils import Dsn @@ -115,46 +115,6 @@ def test_finds_spans_on_scope(sentry_init): assert child_span.root_span == root_span -def test_set_measurement(sentry_init, capture_events): - sentry_init(traces_sample_rate=1.0) - - events = capture_events() - - with start_span(name="measuring stuff") as span: - - with pytest.raises(TypeError): - span.set_measurement() - - with pytest.raises(TypeError): - span.set_measurement("metric.foo") - - span.set_measurement("metric.foo", 123) - span.set_measurement("metric.bar", 456, unit="second") - span.set_measurement("metric.baz", 420.69, unit="custom") - span.set_measurement("metric.foobar", 12, unit="percent") - span.set_measurement("metric.foobar", 17.99, unit="percent") - - (event,) = events - assert event["measurements"]["metric.foo"] == {"value": 123, "unit": ""} - assert event["measurements"]["metric.bar"] == {"value": 456, "unit": "second"} - assert event["measurements"]["metric.baz"] == {"value": 420.69, "unit": "custom"} - assert event["measurements"]["metric.foobar"] == {"value": 17.99, "unit": "percent"} - - -def test_set_measurement_public_api(sentry_init, capture_events): - sentry_init(traces_sample_rate=1.0) - - events = capture_events() - - with start_span(name="measuring stuff"): - set_measurement("metric.foo", 123) - set_measurement("metric.bar", 456, unit="second") - - (event,) = events - assert event["measurements"]["metric.foo"] == {"value": 123, "unit": ""} - assert event["measurements"]["metric.bar"] == {"value": 456, "unit": "second"} - - @pytest.mark.parametrize( "trace_propagation_targets,url,expected_propagation_decision", [ From 5b6d37cc0b5cc209e3cc6aa7a918ccdf03b4e473 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Mon, 28 Apr 2025 14:53:44 +0200 Subject: [PATCH 236/244] Add migration note about SentrySpanProcessor and SentryPropagator --- MIGRATION_GUIDE.md | 1 + 1 file changed, 1 insertion(+) diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index 0a40a659de..187b3aaeb0 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -27,6 +27,7 @@ Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of wh - The `sampling_context` argument of `traces_sampler` and `profiles_sampler` now additionally contains all span attributes known at span start. - We updated how we handle `ExceptionGroup`s. You will now get more data if ExceptionGroups are appearing in chained exceptions. It could happen that after updating the SDK the grouping of issues change because of this. So eventually you will see the same exception in two Sentry issues (one from before the update, one from after the update) - The integration for Python `logging` module does not send Sentry issues by default anymore when calling `logging.error()`, `logging.critical()` or `logging.exception()`. If you want to preserve the old behavior use `sentry_sdk.init(integrations=[LoggingIntegration(event_level="ERROR")])`. 
+- The `SentrySpanProcessor` and `SentryPropagator` are exported from `sentry_sdk.opentelemetry` instead of `sentry_sdk.integrations.opentelemetry`. - The integration-specific content of the `sampling_context` argument of `traces_sampler` and `profiles_sampler` now looks different. - The Celery integration doesn't add the `celery_job` dictionary anymore. Instead, the individual keys are now available as: From a5d0f0128f1460f9d2c6694c118e47a3a6b251e2 Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Mon, 28 Apr 2025 16:13:29 +0200 Subject: [PATCH 237/244] Introduce underscore span attributes (#4331) Introduce the convention of underscore-prefixed span attributes. These won't be sent to Sentry and are meant for internal SDK usage. Changed `flag.count` to internal. Looked through the rest of the attrs we're setting and that stuff requires a big comprehensive cleanup altogether to make stuff align with OTel. Didn't touch anything else for now. Closes https://github.com/getsentry/sentry-python/issues/4329 --- sentry_sdk/opentelemetry/span_processor.py | 8 ++++++-- sentry_sdk/tracing.py | 4 ++-- .../integrations/threading/test_threading.py | 6 +----- tests/opentelemetry/test_span_processor.py | 19 +++++++++++++++++++ tests/test_feature_flags.py | 16 ++++++++++++++++ 5 files changed, 44 insertions(+), 9 deletions(-) create mode 100644 tests/opentelemetry/test_span_processor.py diff --git a/sentry_sdk/opentelemetry/span_processor.py b/sentry_sdk/opentelemetry/span_processor.py index abfb712a89..b5279bccb0 100644 --- a/sentry_sdk/opentelemetry/span_processor.py +++ b/sentry_sdk/opentelemetry/span_processor.py @@ -289,8 +289,12 @@ def _span_to_json(self, span): if parent_span_id: span_json["parent_span_id"] = parent_span_id - if span.attributes: - span_json["data"] = dict(span.attributes) + attributes = getattr(span, "attributes", {}) or {} + if attributes: + span_json["data"] = {} + for key, value in attributes.items(): + if not key.startswith("_"): + span_json["data"][key] = value return span_json diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 388cf38cef..92ac4d7671 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -588,10 +588,10 @@ def set_context(self, key, value): def set_flag(self, flag, value): # type: (str, bool) -> None - flag_count = self.get_attribute("flag.count") or 0 + flag_count = self.get_attribute("_flag.count") or 0 if flag_count < _FLAGS_CAPACITY: self.set_attribute(f"flag.evaluation.{flag}", value) - self.set_attribute("flag.count", flag_count + 1) + self.set_attribute("_flag.count", flag_count + 1) # TODO-neel-potel add deprecation diff --git a/tests/integrations/threading/test_threading.py b/tests/integrations/threading/test_threading.py index 4ab742ff1f..8a5dfef62b 100644 --- a/tests/integrations/threading/test_threading.py +++ b/tests/integrations/threading/test_threading.py @@ -9,7 +9,6 @@ import sentry_sdk from sentry_sdk import capture_message from sentry_sdk.integrations.threading import ThreadingIntegration -from sentry_sdk.tracing import _OTEL_VERSION original_start = Thread.start original_run = Thread.run @@ -106,10 +105,7 @@ def double(number): assert len(event["spans"]) == 0 -@pytest.mark.skipif( - sys.version[:3] == "3.8" and (1, 12) <= _OTEL_VERSION < (1, 16), - reason="Fails in CI on 3.8 and specific OTel versions", -) +@pytest.mark.skipif(sys.version[:3] == "3.8", reason="Fails in CI on 3.8") def test_circular_references(sentry_init, request): sentry_init(default_integrations=False, integrations=[ThreadingIntegration()]) diff 
--git a/tests/opentelemetry/test_span_processor.py b/tests/opentelemetry/test_span_processor.py new file mode 100644 index 0000000000..7d6283d4ea --- /dev/null +++ b/tests/opentelemetry/test_span_processor.py @@ -0,0 +1,19 @@ +import sentry_sdk + + +def test_span_processor_omits_underscore_attributes(sentry_init, capture_events): + sentry_init(traces_sample_rate=1.0) + + events = capture_events() + + with sentry_sdk.start_span(): + with sentry_sdk.start_span() as span: + span.set_attribute("_internal", 47) + span.set_attribute("noninternal", 23) + + assert span._otel_span.attributes["_internal"] == 47 + assert span._otel_span.attributes["noninternal"] == 23 + + outgoing_span = events[0]["spans"][0] + assert "_internal" not in outgoing_span["data"] + assert "noninternal" in outgoing_span["data"] diff --git a/tests/test_feature_flags.py b/tests/test_feature_flags.py index 1b0ed13d49..5c2f1cd352 100644 --- a/tests/test_feature_flags.py +++ b/tests/test_feature_flags.py @@ -259,3 +259,19 @@ def test_flag_limit(sentry_init, capture_events): } ) assert "flag.evaluation.10" not in event["spans"][0]["data"] + + +def test_flag_counter_not_sent(sentry_init, capture_events): + sentry_init(traces_sample_rate=1.0) + + events = capture_events() + + with start_transaction(name="hi"): + with start_span(op="foo", name="bar"): + add_feature_flag("0", True) + add_feature_flag("1", True) + add_feature_flag("2", True) + add_feature_flag("3", True) + + (event,) = events + assert "_flag.count" not in event["spans"][0]["data"] From 35fab6ba91a70041d97936a1b8b2c0b5f7761c32 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Tue, 29 Apr 2025 10:34:16 +0200 Subject: [PATCH 238/244] Traces sample rate default change (#4335) Revert changing the default of `traces_sample_rate` done in https://github.com/getsentry/sentry-python/pull/4240 --- sentry_sdk/consts.py | 2 +- tests/integrations/aiohttp/test_aiohttp.py | 5 +---- tests/integrations/asgi/test_asgi.py | 8 ++------ tests/integrations/django/asgi/test_asgi.py | 10 ++-------- tests/integrations/django/test_basic.py | 1 - tests/integrations/wsgi/test_wsgi.py | 8 ++------ tests/opentelemetry/test_sampler.py | 8 ++++---- tests/test_dsc.py | 4 ++-- tests/tracing/test_trace_propagation.py | 12 ++++++++++-- 9 files changed, 24 insertions(+), 34 deletions(-) diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index 40fc145558..98d98a72ae 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -551,7 +551,7 @@ def __init__( debug=None, # type: Optional[bool] attach_stacktrace=False, # type: bool ca_certs=None, # type: Optional[str] - traces_sample_rate=0, # type: Optional[float] + traces_sample_rate=None, # type: Optional[float] traces_sampler=None, # type: Optional[TracesSampler] profiles_sample_rate=None, # type: Optional[float] profiles_sampler=None, # type: Optional[TracesSampler] diff --git a/tests/integrations/aiohttp/test_aiohttp.py b/tests/integrations/aiohttp/test_aiohttp.py index 8e0c74f365..bc019d54a4 100644 --- a/tests/integrations/aiohttp/test_aiohttp.py +++ b/tests/integrations/aiohttp/test_aiohttp.py @@ -461,10 +461,7 @@ async def hello(request): async def test_trace_from_headers_if_performance_disabled( sentry_init, aiohttp_client, capture_events ): - sentry_init( - integrations=[AioHttpIntegration()], - traces_sample_rate=None, # disable all performance monitoring - ) + sentry_init(integrations=[AioHttpIntegration()]) async def hello(request): capture_message("It's a good day to try dividing by 0") diff --git a/tests/integrations/asgi/test_asgi.py 
b/tests/integrations/asgi/test_asgi.py index f14e960fff..9e97ae3651 100644 --- a/tests/integrations/asgi/test_asgi.py +++ b/tests/integrations/asgi/test_asgi.py @@ -269,9 +269,7 @@ async def test_has_trace_if_performance_disabled( asgi3_app_with_error_and_msg, capture_events, ): - sentry_init( - traces_sample_rate=None, # disable all performance monitoring - ) + sentry_init() app = SentryAsgiMiddleware(asgi3_app_with_error_and_msg) with pytest.raises(ZeroDivisionError): @@ -327,9 +325,7 @@ async def test_trace_from_headers_if_performance_disabled( asgi3_app_with_error_and_msg, capture_events, ): - sentry_init( - traces_sample_rate=None, # disable all performance monitoring - ) + sentry_init() app = SentryAsgiMiddleware(asgi3_app_with_error_and_msg) trace_id = "582b43a4192642f0b136d5159a501701" diff --git a/tests/integrations/django/asgi/test_asgi.py b/tests/integrations/django/asgi/test_asgi.py index 3a5e47cfa8..c10a6b7b8e 100644 --- a/tests/integrations/django/asgi/test_asgi.py +++ b/tests/integrations/django/asgi/test_asgi.py @@ -335,10 +335,7 @@ async def test_has_trace_if_performance_enabled(sentry_init, capture_events): django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1" ) async def test_has_trace_if_performance_disabled(sentry_init, capture_events): - sentry_init( - integrations=[DjangoIntegration()], - traces_sample_rate=None, # disable all performance monitoring - ) + sentry_init(integrations=[DjangoIntegration()]) events = capture_events() @@ -401,10 +398,7 @@ async def test_trace_from_headers_if_performance_enabled(sentry_init, capture_ev django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1" ) async def test_trace_from_headers_if_performance_disabled(sentry_init, capture_events): - sentry_init( - integrations=[DjangoIntegration()], - traces_sample_rate=None, # disable all performance monitoring - ) + sentry_init(integrations=[DjangoIntegration()]) events = capture_events() diff --git a/tests/integrations/django/test_basic.py b/tests/integrations/django/test_basic.py index a37576315f..5b75bbb6af 100644 --- a/tests/integrations/django/test_basic.py +++ b/tests/integrations/django/test_basic.py @@ -241,7 +241,6 @@ def test_trace_from_headers_if_performance_disabled( http_methods_to_capture=("HEAD",), ) ], - traces_sample_rate=None, # disable all performance monitoring ) events = capture_events() diff --git a/tests/integrations/wsgi/test_wsgi.py b/tests/integrations/wsgi/test_wsgi.py index 149dd1d7d4..76c80f6c6a 100644 --- a/tests/integrations/wsgi/test_wsgi.py +++ b/tests/integrations/wsgi/test_wsgi.py @@ -238,9 +238,7 @@ def dogpark(environ, start_response): capture_message("Attempting to fetch the ball") raise ValueError("Fetch aborted. The ball was not returned.") - sentry_init( - traces_sample_rate=None, # disable all performance monitoring - ) + sentry_init() app = SentryWsgiMiddleware(dogpark) client = Client(app) events = capture_events() @@ -303,9 +301,7 @@ def dogpark(environ, start_response): capture_message("Attempting to fetch the ball") raise ValueError("Fetch aborted. 
The ball was not returned.") - sentry_init( - traces_sample_rate=None, # disable all performance monitoring - ) + sentry_init() app = SentryWsgiMiddleware(dogpark) client = Client(app) events = capture_events() diff --git a/tests/opentelemetry/test_sampler.py b/tests/opentelemetry/test_sampler.py index 7198f6b390..4ca1e1963f 100644 --- a/tests/opentelemetry/test_sampler.py +++ b/tests/opentelemetry/test_sampler.py @@ -178,8 +178,8 @@ def keep_only_a(sampling_context): @pytest.mark.parametrize( "traces_sample_rate, expected_num_of_envelopes", [ - # special case for testing, do not pass any traces_sample_rate to init() (the default traces_sample_rate=0 will be used) - (USE_DEFAULT_TRACES_SAMPLE_RATE, 1), + # special case for testing, do not pass any traces_sample_rate to init() (the default traces_sample_rate=None will be used) + (USE_DEFAULT_TRACES_SAMPLE_RATE, 0), # traces_sample_rate=None means do not create new traces, and also do not continue incoming traces. So, no envelopes at all. (None, 0), # traces_sample_rate=0 means do not create new traces (0% of the requests), but continue incoming traces. So envelopes will be created only if there is an incoming trace. @@ -229,9 +229,9 @@ def test_sampling_parent_sampled( @pytest.mark.parametrize( "traces_sample_rate, upstream_sampled, expected_num_of_envelopes", [ - # special case for testing, do not pass any traces_sample_rate to init() (the default traces_sample_rate=0 will be used) + # special case for testing, do not pass any traces_sample_rate to init() (the default traces_sample_rate=None will be used) (USE_DEFAULT_TRACES_SAMPLE_RATE, 0, 0), - (USE_DEFAULT_TRACES_SAMPLE_RATE, 1, 1), + (USE_DEFAULT_TRACES_SAMPLE_RATE, 1, 0), # traces_sample_rate=None means do not create new traces, and also do not continue incoming traces. So, no envelopes at all. 
(None, 0, 0), (None, 1, 0), diff --git a/tests/test_dsc.py b/tests/test_dsc.py index 569b7fd3dc..ea3c0b8988 100644 --- a/tests/test_dsc.py +++ b/tests/test_dsc.py @@ -287,8 +287,8 @@ def my_traces_sampler(sampling_context): "local_traces_sampler_result": None, "local_traces_sample_rate": None, }, - 1.0, # expected_sample_rate - "true", # expected_sampled + None, # expected_sample_rate + "tracing-disabled-no-transactions-should-be-sent", # expected_sampled (traces_sample_rate=None disables all transaction creation) ), ( # 6 traces_sampler overrides incoming (traces_sample_rate not set) { diff --git a/tests/tracing/test_trace_propagation.py b/tests/tracing/test_trace_propagation.py index cb4c3fc90d..358e3f48aa 100644 --- a/tests/tracing/test_trace_propagation.py +++ b/tests/tracing/test_trace_propagation.py @@ -190,7 +190,11 @@ def test_with_incoming_trace_and_trace_propagation_targets_matching( requests.get("http://example.com") # CHECK if performance data (a transaction/span) is sent to Sentry - if traces_sample_rate is None or incoming_parent_sampled == "0": + if ( + traces_sample_rate is None + or traces_sample_rate == USE_DEFAULT_TRACES_SAMPLE_RATE + or incoming_parent_sampled == "0" + ): assert len(events) == 0 else: if incoming_parent_sampled == "1" or traces_sample_rate == 1: @@ -264,7 +268,11 @@ def test_with_incoming_trace_and_trace_propagation_targets_not_matching( requests.get("http://example.com") # CHECK if performance data (a transaction/span) is sent to Sentry - if traces_sample_rate is None or incoming_parent_sampled == "0": + if ( + traces_sample_rate is None + or traces_sample_rate == USE_DEFAULT_TRACES_SAMPLE_RATE + or incoming_parent_sampled == "0" + ): assert len(events) == 0 else: if incoming_parent_sampled == "1" or traces_sample_rate == 1: From f1a3b236e8b518da9ff02c17c25260b420076a45 Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Mon, 5 May 2025 10:11:44 +0200 Subject: [PATCH 239/244] Fix migration guide formatting --- MIGRATION_GUIDE.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index 187b3aaeb0..8e375c5d20 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -93,7 +93,7 @@ Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of wh | full URL | `url.full` | | `headers` | `http.request.header.{header}` | - -The RQ integration doesn't add the `rq_job` object anymore. Instead, the individual properties of the job and the queue, if available, are accessible as follows: + - The RQ integration doesn't add the `rq_job` object anymore. Instead, the individual properties of the job and the queue, if available, are accessible as follows: | RQ property | Sampling context key | Example | | --------------- | ---------------------------- | ---------------------- | From 060b2cf6f93dd35db4511a79c7ea557699cc8044 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Mon, 5 May 2025 10:20:56 +0200 Subject: [PATCH 240/244] Fix feature flags in potel (#4353) Store feature flags on the isolation scope, that is the correct place. I also checked back with Colton about the behavior of feature flags, and having the flags on the isolation scope (meaning: one set of flags per request-response cycle) is the expected behavior. 
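
For illustration, a minimal usage sketch of the behavior this change codifies (not part of the patch; `init()` options such as the DSN are omitted). A flag recorded anywhere during a request is buffered on the isolation scope and attached to any error event captured later in the same request:

```python
import sentry_sdk
from sentry_sdk.feature_flags import add_feature_flag

sentry_sdk.init(traces_sample_rate=1.0)  # DSN etc. omitted in this sketch

with sentry_sdk.start_span(name="request"):
    add_feature_flag("hello", False)  # buffered on the isolation scope
    try:
        with sentry_sdk.start_span(name="inner"):
            raise ValueError("something went wrong")
    except ValueError as exc:
        sentry_sdk.capture_exception(exc)
        # The resulting event contains:
        # event["contexts"]["flags"] == {"values": [{"flag": "hello", "result": False}]}
```

Because the isolation scope lives for one request-response cycle, concurrent requests keep separate flag buffers.
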
--- .github/workflows/test-integrations-ai.yml | 2 +- .github/workflows/test-integrations-misc.yml | 2 +- sentry_sdk/feature_flags.py | 2 +- tests/integrations/fastapi/test_fastapi.py | 40 ++++++++++++++ tests/test_feature_flags.py | 57 ++++++++++++++++++++ 5 files changed, 100 insertions(+), 3 deletions(-) diff --git a/.github/workflows/test-integrations-ai.yml b/.github/workflows/test-integrations-ai.yml index fc7d551249..f82f317fbe 100644 --- a/.github/workflows/test-integrations-ai.yml +++ b/.github/workflows/test-integrations-ai.yml @@ -29,7 +29,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.7","3.9","3.11","3.12"] + python-version: ["3.9","3.11","3.12"] os: [ubuntu-22.04] steps: - uses: actions/checkout@v4.2.2 diff --git a/.github/workflows/test-integrations-misc.yml b/.github/workflows/test-integrations-misc.yml index 6198241fb0..2c8b4044d5 100644 --- a/.github/workflows/test-integrations-misc.yml +++ b/.github/workflows/test-integrations-misc.yml @@ -29,7 +29,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.7","3.8","3.10","3.11","3.12","3.13"] + python-version: ["3.7","3.8","3.9","3.10","3.11","3.12","3.13"] os: [ubuntu-22.04] steps: - uses: actions/checkout@v4.2.2 diff --git a/sentry_sdk/feature_flags.py b/sentry_sdk/feature_flags.py index ea551edd20..efc92661e7 100644 --- a/sentry_sdk/feature_flags.py +++ b/sentry_sdk/feature_flags.py @@ -64,7 +64,7 @@ def add_feature_flag(flag, result): Records a flag and its value to be sent on subsequent error events. We recommend you do this on flag evaluations. Flags are buffered per Sentry scope. """ - flags = sentry_sdk.get_current_scope().flags + flags = sentry_sdk.get_isolation_scope().flags flags.set(flag, result) span = sentry_sdk.get_current_span() diff --git a/tests/integrations/fastapi/test_fastapi.py b/tests/integrations/fastapi/test_fastapi.py index 1c40abedcb..cc435a5e38 100644 --- a/tests/integrations/fastapi/test_fastapi.py +++ b/tests/integrations/fastapi/test_fastapi.py @@ -10,7 +10,9 @@ from fastapi.testclient import TestClient from fastapi.middleware.trustedhost import TrustedHostMiddleware +import sentry_sdk from sentry_sdk import capture_message +from sentry_sdk.feature_flags import add_feature_flag from sentry_sdk.integrations.asgi import SentryAsgiMiddleware from sentry_sdk.integrations.fastapi import FastApiIntegration from sentry_sdk.integrations.starlette import StarletteIntegration @@ -671,3 +673,41 @@ async def subapp_route(): assert event["transaction"] == "/subapp" else: assert event["transaction"].endswith("subapp_route") + + +@pytest.mark.asyncio +async def test_feature_flags(sentry_init, capture_events): + sentry_init( + traces_sample_rate=1.0, + integrations=[StarletteIntegration(), FastApiIntegration()], + ) + + events = capture_events() + + app = FastAPI() + + @app.get("/error") + async def _error(): + add_feature_flag("hello", False) + + with sentry_sdk.start_span(name="test-span"): + with sentry_sdk.start_span(name="test-span-2"): + raise ValueError("something is wrong!") + + try: + client = TestClient(app) + client.get("/error") + except ValueError: + pass + + found = False + for event in events: + if "exception" in event.keys(): + assert event["contexts"]["flags"] == { + "values": [ + {"flag": "hello", "result": False}, + ] + } + found = True + + assert found, "No event with exception found" diff --git a/tests/test_feature_flags.py b/tests/test_feature_flags.py index 5c2f1cd352..43b611b89b 100644 --- a/tests/test_feature_flags.py +++ b/tests/test_feature_flags.py 
@@ -31,6 +31,63 @@ def test_featureflags_integration(sentry_init, capture_events, uninstall_integra } +@pytest.mark.asyncio +async def test_featureflags_integration_spans_async(sentry_init, capture_events): + sentry_init( + traces_sample_rate=1.0, + ) + events = capture_events() + + add_feature_flag("hello", False) + + try: + with sentry_sdk.start_span(name="test-span"): + with sentry_sdk.start_span(name="test-span-2"): + raise ValueError("something wrong!") + except ValueError as e: + sentry_sdk.capture_exception(e) + + found = False + for event in events: + if "exception" in event.keys(): + assert event["contexts"]["flags"] == { + "values": [ + {"flag": "hello", "result": False}, + ] + } + found = True + + assert found, "No event with exception found" + + +def test_featureflags_integration_spans_sync(sentry_init, capture_events): + sentry_init( + traces_sample_rate=1.0, + ) + events = capture_events() + + add_feature_flag("hello", False) + + try: + with sentry_sdk.start_span(name="test-span"): + with sentry_sdk.start_span(name="test-span-2"): + raise ValueError("something wrong!") + except ValueError as e: + sentry_sdk.capture_exception(e) + + found = False + for event in events: + if "exception" in event.keys(): + assert event["contexts"]["flags"] == { + "values": [ + {"flag": "hello", "result": False}, + ] + } + found = True + + assert found, "No event with exception found" + + def test_featureflags_integration_threaded( sentry_init, capture_events, uninstall_integration ): From 112c28fd8d6e994624b9085189450291c47d6dab Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Mon, 5 May 2025 13:41:45 +0200 Subject: [PATCH 241/244] Add logger.debug for sampler decisions for root spans (#4355) --- sentry_sdk/opentelemetry/sampler.py | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/sentry_sdk/opentelemetry/sampler.py b/sentry_sdk/opentelemetry/sampler.py index fb68b644b5..ab3defe3de 100644 --- a/sentry_sdk/opentelemetry/sampler.py +++ b/sentry_sdk/opentelemetry/sampler.py @@ -234,7 +234,7 @@ def should_sample( ) else: logger.debug( - f"[Tracing] Ignoring sampled param for non-root span {name}" + f"[Tracing.Sampler] Ignoring sampled param for non-root span {name}" ) # Check if there is a traces_sampler @@ -264,7 +264,7 @@ def should_sample( # If the sample rate is invalid, drop the span if not is_valid_sample_rate(sample_rate, source=self.__class__.__name__): logger.warning( - f"[Tracing] Discarding {name} because of invalid sample rate." + f"[Tracing.Sampler] Discarding {name} because of invalid sample rate." 
) return dropped_result(parent_span_context, attributes) @@ -279,6 +279,11 @@ def should_sample( sampled = sample_rand < Decimal.from_float(sample_rate) if sampled: + if is_root_span: + logger.debug( + f"[Tracing.Sampler] Sampled #{name} with sample_rate: {sample_rate} and sample_rand: {sample_rand}" + ) + return sampled_result( parent_span_context, attributes, @@ -286,6 +291,11 @@ def should_sample( sample_rand=None if sample_rand == parent_sample_rand else sample_rand, ) else: + if is_root_span: + logger.debug( + f"[Tracing.Sampler] Dropped #{name} with sample_rate: {sample_rate} and sample_rand: {sample_rand}" + ) + return dropped_result( parent_span_context, attributes, From 8ee7dd41ffc194cf0c5aea085263084cdd0ea970 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Mon, 5 May 2025 13:42:04 +0200 Subject: [PATCH 242/244] Respect parent_sampled decision in propagation_context sentry-trace header (#4356) Since we don't automatically have unsampled spans running, this caused a change in behavior when an upstream sampling decision needs to be propagated further downstream. ### Explanation of problem When an incoming trace has `sampled` set to 0 (`trace_id-span_id-0`), in the past we would propagate this since we would have an active span/transaction running but just not sampled, so downstream would also receive `trace_id-span_id-0` from that active span. Now, we actually don't have an active span since we don't sample (just how otel works), so instead of sending the `trace_id-span_id-0` as before, we would have sent `trace_id-other_span_id` from the `propagation_context` instead. This would cause the downstream service to not receive the `-0` flag and would thus sample independently, which is a regression. --- sentry_sdk/scope.py | 6 +----- sentry_sdk/tracing_utils.py | 15 +++++++++++++++ 2 files changed, 16 insertions(+), 5 deletions(-) diff --git a/sentry_sdk/scope.py b/sentry_sdk/scope.py index 975ac6fe04..dec8e70e22 100644 --- a/sentry_sdk/scope.py +++ b/sentry_sdk/scope.py @@ -505,11 +505,7 @@ def get_traceparent(self, *args, **kwargs): # If this scope has a propagation context, return traceparent from there if self._propagation_context is not None: - traceparent = "%s-%s" % ( - self._propagation_context.trace_id, - self._propagation_context.span_id, - ) - return traceparent + return self._propagation_context.to_traceparent() # Fall back to isolation scope's traceparent. It always has one return self.get_isolation_scope().get_traceparent() diff --git a/sentry_sdk/tracing_utils.py b/sentry_sdk/tracing_utils.py index a323b84199..20b88f1e32 100644 --- a/sentry_sdk/tracing_utils.py +++ b/sentry_sdk/tracing_utils.py @@ -432,6 +432,21 @@ def span_id(self, value): # type: (str) -> None self._span_id = value + def to_traceparent(self): + # type: () -> str + if self.parent_sampled is True: + sampled = "1" + elif self.parent_sampled is False: + sampled = "0" + else: + sampled = None + + traceparent = "%s-%s" % (self.trace_id, self.span_id) + if sampled is not None: + traceparent += "-%s" % (sampled,) + + return traceparent + def update(self, other_dict): # type: (Dict[str, Any]) -> None """ From a88dadf9f121df49a350d74f9cbd09a5907fe1af Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Mon, 5 May 2025 14:26:41 +0200 Subject: [PATCH 243/244] Fix FastAPI recursive exceptions (#4334) In some cases FastAPI emits an exception that has as `__cause__` an ExceptionGroup that contains a single excpetion. That single exepction is the original exception. 
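
To make the construct concrete, here is a hypothetical sketch (Python 3.11+, invented names; this is not FastAPI code):

```python
# Illustration only: the original error is the single member of the
# ExceptionGroup that is also set as its __cause__.
original = RuntimeError("handler failed")
wrapper = ExceptionGroup("wrapped", [original])  # group's only member is `original`
original.__cause__ = wrapper                     # ...and the group is its __cause__

# Following original.__cause__ leads to the group, whose exceptions[0] is
# `original` again, so a naive walk of the chain would cycle between the two.
assert wrapper.exceptions[0] is original
assert original.__cause__ is wrapper
```
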
This PR prevents an infinite loop by trying to add this construct in the `exception.values` field. It also introduces an hard upper limit of chained/nested transaction, to never run into an infinite loop. --- sentry_sdk/utils.py | 23 ++++ tests/test_exceptiongroup.py | 238 +++++++++++++++++++++++++++++++++++ 2 files changed, 261 insertions(+) diff --git a/sentry_sdk/utils.py b/sentry_sdk/utils.py index 407d9613d5..bb5aaf65e2 100644 --- a/sentry_sdk/utils.py +++ b/sentry_sdk/utils.py @@ -86,6 +86,13 @@ be affected by this limit if they have a custom recursion limit. """ +MAX_EXCEPTIONS = 25 +"""Maximum number of exceptions in a chain or group to send to Sentry. + +This is a sanity limit to avoid ending in an infinite loop of exceptions when the same exception is in the root and a leave +of the exception tree. +""" + def env_to_bool(value, *, strict=False): # type: (Any, Optional[bool]) -> bool | None @@ -823,6 +830,9 @@ def exceptions_from_error( parent_id = exception_id exception_id += 1 + if exception_id > MAX_EXCEPTIONS - 1: + return (exception_id, exceptions) + causing_exception = None exception_source = None @@ -853,6 +863,19 @@ def exceptions_from_error( exception_source = "__context__" causing_exception = exc_value.__context__ # type: ignore + if causing_exception: + # Some frameworks (e.g. FastAPI) wrap the causing exception in an + # ExceptionGroup that only contain one exception: the causing exception. + # This would lead to an infinite loop, so we skip the causing exception + # in this case. (because it is the same as the base_exception above) + if ( + BaseExceptionGroup is not None + and isinstance(causing_exception, BaseExceptionGroup) + and len(causing_exception.exceptions) == 1 + and causing_exception.exceptions[0] == exc_value + ): + causing_exception = None + if causing_exception: (exception_id, child_exceptions) = exceptions_from_error( exc_type=type(causing_exception), diff --git a/tests/test_exceptiongroup.py b/tests/test_exceptiongroup.py index 01ec0a78d4..b4512c9a79 100644 --- a/tests/test_exceptiongroup.py +++ b/tests/test_exceptiongroup.py @@ -1,4 +1,5 @@ import sys +from unittest import mock import pytest from sentry_sdk.utils import event_from_exception @@ -315,3 +316,240 @@ def test_simple_exception(): exception_values = event["exception"]["values"] assert exception_values == expected_exception_values + + +@minimum_python_311 +def test_exceptiongroup_recursion(): + exception_group = None + + my_error = RuntimeError("my error") + try: + try: + raise my_error + except RuntimeError: + raise ExceptionGroup( + "my_group", + [my_error], + ) + except ExceptionGroup as e: + exception_group = e + + (event, _) = event_from_exception( + exception_group, + client_options={ + "include_local_variables": True, + "include_source_context": True, + "max_value_length": 1024, + }, + mechanism={"type": "test_suite", "handled": False}, + ) + + values = event["exception"]["values"] + + # For this test the stacktrace and the module is not important + for x in values: + if "stacktrace" in x: + del x["stacktrace"] + if "module" in x: + del x["module"] + + # One ExceptionGroup, + # then the RuntimeError in the ExceptionGroup, + # and the original RuntimeError that was raised. 
+ assert len(values) == 3 + + expected_values = [ + { + "mechanism": { + "exception_id": 2, + "handled": False, + "parent_id": 0, + "source": "exceptions[0]", + "type": "chained", + }, + "type": "RuntimeError", + "value": "my error", + }, + { + "mechanism": { + "exception_id": 1, + "handled": False, + "parent_id": 0, + "source": "__context__", + "type": "chained", + }, + "type": "RuntimeError", + "value": "my error", + }, + { + "mechanism": { + "exception_id": 0, + "handled": False, + "is_exception_group": True, + "type": "test_suite", + }, + "type": "ExceptionGroup", + "value": "my_group", + }, + ] + + assert values == expected_values + + +@minimum_python_311 +def test_exceptiongroup_recursion_multiple_levels(): + error = None + + my_error = RuntimeError("my error") + my_error_2 = RuntimeError("my error 2") + try: + try: + raise my_error + except RuntimeError: + try: + raise ExceptionGroup( + "my_group", + [my_error_2], + ) + except ExceptionGroup: + raise my_error + + except RuntimeError as e: + error = e + + (event, _) = event_from_exception( + error, + client_options={ + "include_local_variables": True, + "include_source_context": True, + "max_value_length": 1024, + }, + mechanism={"type": "test_suite", "handled": False}, + ) + + values = event["exception"]["values"] + + # For this test the stacktrace and the module is not important + for x in values: + if "stacktrace" in x: + del x["stacktrace"] + if "module" in x: + del x["module"] + + # One ExceptionGroup, + # then the RuntimeError in the ExceptionGroup, + # and the original RuntimeError that was raised. + assert len(values) == 3 + + expected_values = [ + { + "mechanism": { + "type": "chained", + "handled": False, + "exception_id": 2, + "source": "exceptions[0]", + "parent_id": 1, + }, + "type": "RuntimeError", + "value": "my error 2", + }, + { + "mechanism": { + "type": "chained", + "handled": False, + "exception_id": 1, + "source": "__context__", + "parent_id": 0, + "is_exception_group": True, + }, + "type": "ExceptionGroup", + "value": "my_group", + }, + { + "mechanism": { + "type": "test_suite", + "handled": False, + "exception_id": 0, + }, + "type": "RuntimeError", + "value": "my error", + }, + ] + + assert values == expected_values + + +@minimum_python_311 +def test_too_many_exceptions(): + with mock.patch("sentry_sdk.utils.MAX_EXCEPTIONS", 3): + error = None + try: + try: + raise RuntimeError("my error 1") + except RuntimeError: + try: + raise RuntimeError("my error 2") + except RuntimeError: + try: + raise RuntimeError("my error 3") + except RuntimeError: + raise RuntimeError("my error 4") + except RuntimeError as e: + error = e + + (event, _) = event_from_exception( + error, + client_options={ + "include_local_variables": True, + "include_source_context": True, + "max_value_length": 1024, + }, + mechanism={"type": "test_suite", "handled": False}, + ) + + values = event["exception"]["values"] + + # For this test the stacktrace and the module is not important + for x in values: + if "stacktrace" in x: + del x["stacktrace"] + if "module" in x: + del x["module"] + + assert len(values) == 3 + + expected_values = [ + { + "mechanism": { + "type": "chained", + "handled": False, + "exception_id": 2, + "source": "__context__", + "parent_id": 1, + }, + "type": "RuntimeError", + "value": "my error 2", + }, + { + "mechanism": { + "type": "chained", + "handled": False, + "exception_id": 1, + "source": "__context__", + "parent_id": 0, + }, + "type": "RuntimeError", + "value": "my error 3", + }, + { + "mechanism": { + "type": "test_suite", 
+ "handled": False, + "exception_id": 0, + }, + "type": "RuntimeError", + "value": "my error 4", + }, + ] + + assert values == expected_values From 83e99e152d301e0c7327b7489a3ed59955094231 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Mon, 5 May 2025 14:46:12 +0200 Subject: [PATCH 244/244] Add top level API for `add_atttachment`. (#4360) This PR also needs an update of the docs to make sure people use the top level API. See this docs issue: https://github.com/getsentry/sentry-docs/issues/13592 --- MIGRATION_GUIDE.md | 2 ++ docs/api.rst | 1 + sentry_sdk/__init__.py | 1 + sentry_sdk/api.py | 15 +++++++++++++++ 4 files changed, 19 insertions(+) diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index 8e375c5d20..5c2402e07f 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -6,6 +6,8 @@ Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of wh ### New Features +- Added `add_attachment()` as a top level API, so you can do now: `sentry_sdk.add_attachment(...)` (up until now it was only available on the `Scope`) + ### Changed - The SDK now supports Python 3.7 and higher. diff --git a/docs/api.rst b/docs/api.rst index 95acc70455..f79eed0cbb 100644 --- a/docs/api.rst +++ b/docs/api.rst @@ -25,6 +25,7 @@ Capturing Data Enriching Events ================ +.. autofunction:: sentry_sdk.api.add_attachment .. autofunction:: sentry_sdk.api.add_breadcrumb .. autofunction:: sentry_sdk.api.set_context .. autofunction:: sentry_sdk.api.set_extra diff --git a/sentry_sdk/__init__.py b/sentry_sdk/__init__.py index b35c446dc0..55f2256e3b 100644 --- a/sentry_sdk/__init__.py +++ b/sentry_sdk/__init__.py @@ -16,6 +16,7 @@ "integrations", # From sentry_sdk.api "init", + "add_attachment", "add_breadcrumb", "capture_event", "capture_exception", diff --git a/sentry_sdk/api.py b/sentry_sdk/api.py index b8a2498d5d..418afe6a8f 100644 --- a/sentry_sdk/api.py +++ b/sentry_sdk/api.py @@ -39,6 +39,7 @@ # When changing this, update __all__ in __init__.py too __all__ = [ "init", + "add_attachment", "add_breadcrumb", "capture_event", "capture_exception", @@ -171,6 +172,20 @@ def capture_exception( return get_current_scope().capture_exception(error, scope=scope, **scope_kwargs) +@scopemethod +def add_attachment( + bytes=None, # type: Union[None, bytes, Callable[[], bytes]] + filename=None, # type: Optional[str] + path=None, # type: Optional[str] + content_type=None, # type: Optional[str] + add_to_transactions=False, # type: bool +): + # type: (...) -> None + return get_isolation_scope().add_attachment( + bytes, filename, path, content_type, add_to_transactions + ) + + @scopemethod def add_breadcrumb( crumb=None, # type: Optional[sentry_sdk._types.Breadcrumb]