2025-12-01

This commit is contained in:
2026-03-17 14:58:51 -06:00
parent 183e865f8b
commit 4b82b57113
6846 changed files with 954887 additions and 162606 deletions
@@ -8,6 +8,7 @@ from importlib import import_module
from typing import TYPE_CHECKING, List, Dict, cast, overload
import warnings
import sentry_sdk
from sentry_sdk._compat import PY37, check_uwsgi_thread_support
from sentry_sdk.utils import (
AnnotatedValue,
@@ -22,11 +23,16 @@ from sentry_sdk.utils import (
handle_in_app,
is_gevent,
logger,
get_before_send_log,
get_before_send_metric,
has_logs_enabled,
has_metrics_enabled,
)
from sentry_sdk.serializer import serialize
from sentry_sdk.tracing import trace
from sentry_sdk.transport import BaseHttpTransport, make_transport
from sentry_sdk.consts import (
SPANDATA,
DEFAULT_MAX_VALUE_LENGTH,
DEFAULT_OPTIONS,
INSTRUMENTER,
@@ -34,6 +40,7 @@ from sentry_sdk.consts import (
ClientConstructor,
)
from sentry_sdk.integrations import _DEFAULT_INTEGRATIONS, setup_integrations
from sentry_sdk.integrations.dedupe import DedupeIntegration
from sentry_sdk.sessions import SessionFlusher
from sentry_sdk.envelope import Envelope
from sentry_sdk.profiler.continuous_profiler import setup_continuous_profiler
@@ -44,7 +51,6 @@ from sentry_sdk.profiler.transaction_profiler import (
)
from sentry_sdk.scrubber import EventScrubber
from sentry_sdk.monitor import Monitor
from sentry_sdk.spotlight import setup_spotlight
if TYPE_CHECKING:
from typing import Any
@@ -55,13 +61,14 @@ if TYPE_CHECKING:
from typing import Union
from typing import TypeVar
from sentry_sdk._types import Event, Hint, SDKInfo
from sentry_sdk._types import Event, Hint, SDKInfo, Log, Metric
from sentry_sdk.integrations import Integration
from sentry_sdk.metrics import MetricsAggregator
from sentry_sdk.scope import Scope
from sentry_sdk.session import Session
from sentry_sdk.spotlight import SpotlightClient
from sentry_sdk.transport import Transport
from sentry_sdk._log_batcher import LogBatcher
from sentry_sdk._metrics_batcher import MetricsBatcher
I = TypeVar("I", bound=Integration) # noqa: E741
@@ -107,7 +114,7 @@ def _get_options(*args, **kwargs):
rv["environment"] = os.environ.get("SENTRY_ENVIRONMENT") or "production"
if rv["debug"] is None:
rv["debug"] = env_to_bool(os.environ.get("SENTRY_DEBUG", "False"), strict=True)
rv["debug"] = env_to_bool(os.environ.get("SENTRY_DEBUG"), strict=True) or False
if rv["server_name"] is None and hasattr(socket, "gethostname"):
rv["server_name"] = socket.gethostname()
@@ -139,6 +146,11 @@ def _get_options(*args, **kwargs):
)
rv["socket_options"] = None
if rv["keep_alive"] is None:
rv["keep_alive"] = (
env_to_bool(os.environ.get("SENTRY_KEEP_ALIVE"), strict=True) or False
)
if rv["enable_tracing"] is not None:
warnings.warn(
"The `enable_tracing` parameter is deprecated. Please use `traces_sample_rate` instead.",
@@ -168,13 +180,12 @@ class BaseClient:
def __init__(self, options=None):
# type: (Optional[Dict[str, Any]]) -> None
self.options = (
options if options is not None else DEFAULT_OPTIONS
) # type: Dict[str, Any]
self.options = options if options is not None else DEFAULT_OPTIONS # type: Dict[str, Any]
self.transport = None # type: Optional[Transport]
self.monitor = None # type: Optional[Monitor]
self.metrics_aggregator = None # type: Optional[MetricsAggregator]
self.log_batcher = None # type: Optional[LogBatcher]
self.metrics_batcher = None # type: Optional[MetricsBatcher]
def __getstate__(self, *args, **kwargs):
# type: (*Any, **Any) -> Any
@@ -206,6 +217,14 @@ class BaseClient:
# type: (*Any, **Any) -> Optional[str]
return None
def _capture_log(self, log):
# type: (Log) -> None
pass
def _capture_metric(self, metric):
# type: (Metric) -> None
pass
def capture_session(self, *args, **kwargs):
    # type: (*Any, **Any) -> None
    """Base no-op session capture; the real client overrides this."""
    return None
@@ -348,25 +367,19 @@ class _Client(BaseClient):
self.session_flusher = SessionFlusher(capture_func=_capture_envelope)
self.metrics_aggregator = None # type: Optional[MetricsAggregator]
experiments = self.options.get("_experiments", {})
if experiments.get("enable_metrics", True):
# Context vars are not working correctly on Python <=3.6
# with gevent.
metrics_supported = not is_gevent() or PY37
if metrics_supported:
from sentry_sdk.metrics import MetricsAggregator
self.log_batcher = None
self.metrics_aggregator = MetricsAggregator(
capture_func=_capture_envelope,
enable_code_locations=bool(
experiments.get("metric_code_locations", True)
),
)
else:
logger.info(
"Metrics not supported on Python 3.6 and lower with gevent."
)
if has_logs_enabled(self.options):
from sentry_sdk._log_batcher import LogBatcher
self.log_batcher = LogBatcher(capture_func=_capture_envelope)
self.metrics_batcher = None
if has_metrics_enabled(self.options):
from sentry_sdk._metrics_batcher import MetricsBatcher
self.metrics_batcher = MetricsBatcher(capture_func=_capture_envelope)
max_request_body_size = ("always", "never", "small", "medium")
if self.options["max_request_body_size"] not in max_request_body_size:
@@ -409,7 +422,17 @@ class _Client(BaseClient):
)
if self.options.get("spotlight"):
# This is intentionally here to prevent setting up spotlight
# stuff we don't need unless spotlight is explicitly enabled
from sentry_sdk.spotlight import setup_spotlight
self.spotlight = setup_spotlight(self.options)
if not self.options["dsn"]:
sample_all = lambda *_args, **_kwargs: 1.0
self.options["send_default_pii"] = True
self.options["error_sampler"] = sample_all
self.options["traces_sampler"] = sample_all
self.options["profiles_sampler"] = sample_all
sdk_name = get_sdk_name(list(self.integrations.keys()))
SDK_INFO["name"] = sdk_name
@@ -437,7 +460,7 @@ class _Client(BaseClient):
if (
self.monitor
or self.metrics_aggregator
or self.log_batcher
or has_profiling_enabled(self.options)
or isinstance(self.transport, BaseHttpTransport)
):
@@ -461,11 +484,7 @@ class _Client(BaseClient):
Returns whether the client should send default PII (Personally Identifiable Information) data to Sentry.
"""
result = self.options.get("send_default_pii")
if result is None:
result = not self.options["dsn"] and self.spotlight is not None
return result
return self.options.get("send_default_pii") or False
@property
def dsn(self):
@@ -482,12 +501,14 @@ class _Client(BaseClient):
# type: (...) -> Optional[Event]
previous_total_spans = None # type: Optional[int]
previous_total_breadcrumbs = None # type: Optional[int]
if event.get("timestamp") is None:
event["timestamp"] = datetime.now(timezone.utc)
is_transaction = event.get("type") == "transaction"
if scope is not None:
is_transaction = event.get("type") == "transaction"
spans_before = len(cast(List[Dict[str, object]], event.get("spans", [])))
event_ = scope.apply_to_event(event, hint, self.options)
@@ -518,9 +539,20 @@ class _Client(BaseClient):
dropped_spans = event.pop("_dropped_spans", 0) + spans_delta # type: int
if dropped_spans > 0:
previous_total_spans = spans_before + dropped_spans
if scope._n_breadcrumbs_truncated > 0:
breadcrumbs = event.get("breadcrumbs", {})
values = (
breadcrumbs.get("values", [])
if not isinstance(breadcrumbs, AnnotatedValue)
else []
)
previous_total_breadcrumbs = (
len(values) + scope._n_breadcrumbs_truncated
)
if (
self.options["attach_stacktrace"]
not is_transaction
and self.options["attach_stacktrace"]
and "exception" not in event
and "stacktrace" not in event
and "threads" not in event
@@ -566,10 +598,30 @@ class _Client(BaseClient):
if event_scrubber:
event_scrubber.scrub_event(event)
if scope is not None and scope._gen_ai_original_message_count:
spans = event.get("spans", []) # type: List[Dict[str, Any]] | AnnotatedValue
if isinstance(spans, list):
for span in spans:
span_id = span.get("span_id", None)
span_data = span.get("data", {})
if (
span_id
and span_id in scope._gen_ai_original_message_count
and SPANDATA.GEN_AI_REQUEST_MESSAGES in span_data
):
span_data[SPANDATA.GEN_AI_REQUEST_MESSAGES] = AnnotatedValue(
span_data[SPANDATA.GEN_AI_REQUEST_MESSAGES],
{"len": scope._gen_ai_original_message_count[span_id]},
)
if previous_total_spans is not None:
event["spans"] = AnnotatedValue(
event.get("spans", []), {"len": previous_total_spans}
)
if previous_total_breadcrumbs is not None:
event["breadcrumbs"] = AnnotatedValue(
event.get("breadcrumbs", {"values": []}),
{"len": previous_total_breadcrumbs},
)
# Postprocess the event here so that annotated types do
# generally not surface in before_send
@@ -599,6 +651,14 @@ class _Client(BaseClient):
self.transport.record_lost_event(
"before_send", data_category="error"
)
# If this is an exception, reset the DedupeIntegration. It still
# remembers the dropped exception as the last exception, meaning
# that if the same exception happens again and is not dropped
# in before_send, it'd get dropped by DedupeIntegration.
if event.get("exception"):
DedupeIntegration.reset_last_seen()
event = new_event
before_send_transaction = self.options["before_send_transaction"]
@@ -740,6 +800,8 @@ class _Client(BaseClient):
if exceptions:
errored = True
for error in exceptions:
if isinstance(error, AnnotatedValue):
error = error.value or {}
mechanism = error.get("mechanism")
if isinstance(mechanism, Mapping) and mechanism.get("handled") is False:
crashed = True
@@ -847,8 +909,135 @@ class _Client(BaseClient):
return return_value
def _capture_log(self, log):
    # type: (Optional[Log]) -> None
    """Enrich a log entry with SDK/scope metadata and hand it to the batcher.

    Drops the log early when the `enable_logs` feature is off or the log is
    None. Attributes already present on the log are never overwritten —
    every enrichment below checks for the key first. The user-supplied
    `before_send_log` hook runs last and may drop the log by returning None.
    """
    if not has_logs_enabled(self.options) or log is None:
        return

    current_scope = sentry_sdk.get_current_scope()
    isolation_scope = sentry_sdk.get_isolation_scope()

    # SDK identification is always stamped (unconditionally overwrites).
    log["attributes"]["sentry.sdk.name"] = SDK_INFO["name"]
    log["attributes"]["sentry.sdk.version"] = SDK_INFO["version"]

    # Server/environment/release come from client options, but only fill
    # in when the caller did not already set the attribute.
    server_name = self.options.get("server_name")
    if server_name is not None and SPANDATA.SERVER_ADDRESS not in log["attributes"]:
        log["attributes"][SPANDATA.SERVER_ADDRESS] = server_name

    environment = self.options.get("environment")
    if environment is not None and "sentry.environment" not in log["attributes"]:
        log["attributes"]["sentry.environment"] = environment

    release = self.options.get("release")
    if release is not None and "sentry.release" not in log["attributes"]:
        log["attributes"]["sentry.release"] = release

    # Link the log to the active span, if any.
    span = current_scope.span
    if span is not None and "sentry.trace.parent_span_id" not in log["attributes"]:
        log["attributes"]["sentry.trace.parent_span_id"] = span.span_id

    # Trace id fallback order: active transaction, then the isolation
    # scope's propagation context.
    if log.get("trace_id") is None:
        transaction = current_scope.transaction
        propagation_context = isolation_scope.get_active_propagation_context()
        if transaction is not None:
            log["trace_id"] = transaction.trace_id
        elif propagation_context is not None:
            log["trace_id"] = propagation_context.trace_id

    # The user, if present, is always set on the isolation scope.
    if isolation_scope._user is not None:
        for log_attribute, user_attribute in (
            ("user.id", "id"),
            ("user.name", "username"),
            ("user.email", "email"),
        ):
            if (
                user_attribute in isolation_scope._user
                and log_attribute not in log["attributes"]
            ):
                log["attributes"][log_attribute] = isolation_scope._user[
                    user_attribute
                ]

    # If debug is enabled, log the log to the console
    debug = self.options.get("debug", False)
    if debug:
        logger.debug(
            f"[Sentry Logs] [{log.get('severity_text')}] {log.get('body')}"
        )

    # Run the user's before_send_log hook; returning None drops the log.
    before_send_log = get_before_send_log(self.options)
    if before_send_log is not None:
        log = before_send_log(log, {})

    if log is None:
        return

    # log_batcher may be None (e.g. logs disabled at init) — then the
    # enriched log is silently discarded.
    if self.log_batcher:
        self.log_batcher.add(log)
def _capture_metric(self, metric):
    # type: (Optional[Metric]) -> None
    """Enrich a metric with SDK/scope metadata and hand it to the batcher.

    Mirrors `_capture_log`: drops early when metrics are disabled or the
    metric is None, never overwrites attributes the caller already set,
    and lets the user's `before_send_metric` hook drop the metric by
    returning None.
    """
    if not has_metrics_enabled(self.options) or metric is None:
        return

    isolation_scope = sentry_sdk.get_isolation_scope()

    # SDK identification is always stamped (unconditionally overwrites).
    metric["attributes"]["sentry.sdk.name"] = SDK_INFO["name"]
    metric["attributes"]["sentry.sdk.version"] = SDK_INFO["version"]

    # Environment/release from client options, only when not already set.
    environment = self.options.get("environment")
    if environment is not None and "sentry.environment" not in metric["attributes"]:
        metric["attributes"]["sentry.environment"] = environment

    release = self.options.get("release")
    if release is not None and "sentry.release" not in metric["attributes"]:
        metric["attributes"]["sentry.release"] = release

    # Trace linkage: prefer the active span's trace/span ids; otherwise
    # fall back to the propagation context's trace id. The nil UUID is
    # the default when neither is available.
    span = sentry_sdk.get_current_span()
    metric["trace_id"] = "00000000-0000-0000-0000-000000000000"
    if span:
        metric["trace_id"] = span.trace_id
        metric["span_id"] = span.span_id
    else:
        propagation_context = isolation_scope.get_active_propagation_context()
        if propagation_context and propagation_context.trace_id:
            metric["trace_id"] = propagation_context.trace_id

    # Copy user identity from the isolation scope without overwriting
    # caller-provided attributes.
    if isolation_scope._user is not None:
        for metric_attribute, user_attribute in (
            ("user.id", "id"),
            ("user.name", "username"),
            ("user.email", "email"),
        ):
            if (
                user_attribute in isolation_scope._user
                and metric_attribute not in metric["attributes"]
            ):
                metric["attributes"][metric_attribute] = isolation_scope._user[
                    user_attribute
                ]

    # Echo the metric to the debug logger when debug mode is on.
    debug = self.options.get("debug", False)
    if debug:
        logger.debug(
            f"[Sentry Metrics] [{metric.get('type')}] {metric.get('name')}: {metric.get('value')}"
        )

    # Run the user's before_send_metric hook; returning None drops it.
    before_send_metric = get_before_send_metric(self.options)
    if before_send_metric is not None:
        metric = before_send_metric(metric, {})

    if metric is None:
        return

    # metrics_batcher may be None (metrics disabled at init) — then the
    # enriched metric is silently discarded.
    if self.metrics_batcher:
        self.metrics_batcher.add(metric)
def capture_session(
self, session # type: Session
self,
session, # type: Session
):
# type: (...) -> None
if not session.release:
@@ -869,7 +1058,8 @@ class _Client(BaseClient):
...
def get_integration(
self, name_or_class # type: Union[str, Type[Integration]]
self,
name_or_class, # type: Union[str, Type[Integration]]
):
# type: (...) -> Optional[Integration]
"""Returns the integration for this client by name or class.
@@ -897,8 +1087,10 @@ class _Client(BaseClient):
if self.transport is not None:
self.flush(timeout=timeout, callback=callback)
self.session_flusher.kill()
if self.metrics_aggregator is not None:
self.metrics_aggregator.kill()
if self.log_batcher is not None:
self.log_batcher.kill()
if self.metrics_batcher is not None:
self.metrics_batcher.kill()
if self.monitor:
self.monitor.kill()
self.transport.kill()
@@ -921,8 +1113,10 @@ class _Client(BaseClient):
if timeout is None:
timeout = self.options["shutdown_timeout"]
self.session_flusher.flush()
if self.metrics_aggregator is not None:
self.metrics_aggregator.flush()
if self.log_batcher is not None:
self.log_batcher.flush()
if self.metrics_batcher is not None:
self.metrics_batcher.flush()
self.transport.flush(timeout=timeout, callback=callback)
def __enter__(self):