Merge branch 'main' into evan.li/release-ragas
lievan authored Jan 29, 2025
2 parents 8e4fd41 + cb41f8e commit 8d5455f
Showing 44 changed files with 481 additions and 350 deletions.
2 changes: 2 additions & 0 deletions .github/CODEOWNERS
@@ -166,12 +166,14 @@ ddtrace/internal/remoteconfig @DataDog/remote-config @DataDog/apm-core-pyt
tests/internal/remoteconfig @DataDog/remote-config @DataDog/apm-core-python

# API SDK
ddtrace/trace/ @DataDog/apm-sdk-api-python
ddtrace/_trace/ @DataDog/apm-sdk-api-python
ddtrace/opentelemetry/ @DataDog/apm-sdk-api-python
ddtrace/internal/opentelemetry @DataDog/apm-sdk-api-python
ddtrace/opentracer/ @DataDog/apm-sdk-api-python
ddtrace/propagation/ @DataDog/apm-sdk-api-python
ddtrace/filters.py @DataDog/apm-sdk-api-python
ddtrace/provider.py @DataDog/apm-sdk-api-python
ddtrace/pin.py @DataDog/apm-sdk-api-python
ddtrace/sampler.py @DataDog/apm-sdk-api-python
ddtrace/sampling_rule.py @DataDog/apm-sdk-api-python
5 changes: 4 additions & 1 deletion .github/workflows/build_python_3.yml
@@ -35,7 +35,8 @@ jobs:
cibuildwheel --print-build-identifiers --platform linux --arch x86_64,i686 | jq -cR '{only: ., os: "ubuntu-latest"}' \
&& cibuildwheel --print-build-identifiers --platform linux --arch aarch64 | jq -cR '{only: ., os: "arm-4core-linux"}' \
&& cibuildwheel --print-build-identifiers --platform windows --arch AMD64,x86 | grep -v 313 | jq -cR '{only: ., os: "windows-latest"}' \
&& cibuildwheel --print-build-identifiers --platform macos --arch x86_64,universal2 | jq -cR '{only: ., os: "macos-13"}'
&& cibuildwheel --print-build-identifiers --platform macos --arch x86_64 | jq -cR '{only: ., os: "macos-13"}' \
&& cibuildwheel --print-build-identifiers --platform macos --arch arm64 | jq -cR '{only: ., os: "macos-latest"}'
} | jq -sc
)
echo $MATRIX_INCLUDE
@@ -112,6 +113,7 @@ jobs:
choco install -y 7zip &&
7z d -r "{wheel}" *.c *.cpp *.cc *.h *.hpp *.pyx &&
move "{wheel}" "{dest_dir}"
CIBW_TEST_COMMAND: "python {project}/tests/smoke_test.py"
# DEV: Uncomment to debug MacOS
# CIBW_BUILD_VERBOSITY_MACOS: 3

@@ -152,6 +154,7 @@ jobs:
choco install -y 7zip &&
7z d -r "{wheel}" *.c *.cpp *.cc *.h *.hpp *.pyx &&
move "{wheel}" "{dest_dir}"
CIBW_TEST_COMMAND: "python {project}/tests/smoke_test.py"
# DEV: Uncomment to debug MacOS
# CIBW_BUILD_VERBOSITY_MACOS: 3

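The new `CIBW_TEST_COMMAND` entries run `tests/smoke_test.py` against every freshly built wheel. That script's contents are not part of this diff; a minimal smoke test along these lines (purely hypothetical, for illustration) would just prove the built artifact imports and exposes a version:

```python
# Hypothetical stand-in for tests/smoke_test.py (the real file is not shown in this diff).
# A wheel smoke test typically just verifies the built artifact imports cleanly.
import ddtrace

assert ddtrace.__version__, "ddtrace imported but reported no version"
print(f"ddtrace {ddtrace.__version__} imported successfully")
```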
27 changes: 27 additions & 0 deletions .github/workflows/check_safe_main_merge.yml
@@ -0,0 +1,27 @@
name: Check for Safe main Merge

on:
pull_request:
branches:
- '3.x-staging'

jobs:
check-merge:
runs-on: ubuntu-latest
steps:
# Step 1: Checkout the repository
- name: Checkout repository
uses: actions/checkout@v4

# Step 2: Fetch the main branch
- name: Fetch main branch
run: git fetch origin main

# Step 3: Attempt to merge
- name: Check merge conflicts
run: |
git merge --no-commit --no-ff origin/main || exit 1
# Step 4: Clean up the merge (optional)
- name: Abort merge
if: failure()
run: git merge --abort
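The workflow boils down to three git operations, so the same check can be run locally before opening a pull request against `3.x-staging`. A rough sketch in Python with `subprocess` (assuming a clean working tree and an `origin` remote with a `main` branch):

```python
# Local equivalent of the "safe main merge" check above.
# Assumes a clean working tree; aborts the trial merge in every case.
import subprocess

subprocess.run(["git", "fetch", "origin", "main"], check=True)

# --no-commit --no-ff attempts the merge without creating a merge commit.
result = subprocess.run(["git", "merge", "--no-commit", "--no-ff", "origin/main"])
if result.returncode != 0:
    print("merge with origin/main would conflict")

# Leave the working tree untouched whether or not the merge succeeded.
subprocess.run(["git", "merge", "--abort"], check=False)
```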
1 change: 1 addition & 0 deletions .gitlab/benchmarks.yml
@@ -96,6 +96,7 @@ benchmark-serverless:
tags: ["arch:amd64"]
when: on_success
needs: [ "benchmark-serverless-trigger" ]
allow_failure: true
script:
- git clone https://gitlab-ci-token:${CI_JOB_TOKEN}@gitlab.ddbuild.io/DataDog/serverless-tools.git ./serverless-tools && cd ./serverless-tools
- ./ci/check_trigger_status.sh
15 changes: 15 additions & 0 deletions CHANGELOG.md
@@ -4,6 +4,21 @@ Changelogs for versions not listed here can be found at https://github.com/DataD

---

## 2.19.2
### Bug Fixes

- Tracing
- celery: Fixes an issue where `celery.apply` spans from Celery prerun were closed too soon, leaving span tags missing.
- openai: Fixes a patching issue where asynchronous moderation endpoint calls resulted in coroutine scheduling errors.
- openai: Ensures the OpenAI integration is compatible with Python versions 3.12 and 3.13.
- vertexai: Resolves an issue with `chat.send_message()` where the `content` keyword argument was not parsed correctly.
- LLM Observability
- Resolves an issue where annotating a span with non-Latin-1 (but valid UTF-8) input/output values resulted in encoding errors (see the encoding sketch below).
- Lib-Injection
- Fixes incorrect telemetry data payload format.

---

## 2.19.1
### Bug Fixes

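Background for the LLM Observability entry above: Latin-1 only covers code points up to U+00FF, so a valid UTF-8 input/output value outside that range fails a Latin-1 encoding step. A quick illustration in plain Python (not the ddtrace API):

```python
# Why non-Latin-1 annotation values need UTF-8 handling: Latin-1 stops at U+00FF.
value = "テスト"  # valid UTF-8 text, outside the Latin-1 range

print(value.encode("utf-8"))  # b'\xe3\x83\x86\xe3\x82\xb9\xe3\x83\x88'
try:
    value.encode("latin-1")
except UnicodeEncodeError as exc:
    print(f"latin-1 cannot represent this value: {exc}")
```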
56 changes: 54 additions & 2 deletions ddtrace/_trace/trace_handlers.py
@@ -109,11 +109,14 @@ def _get_parameters_for_new_span_directly_from_context(ctx: core.ExecutionContex
def _start_span(ctx: core.ExecutionContext, call_trace: bool = True, **kwargs) -> "Span":
span_kwargs = _get_parameters_for_new_span_directly_from_context(ctx)
call_trace = ctx.get_item("call_trace", call_trace)
tracer = (ctx.get_item("middleware") or ctx["pin"]).tracer
tracer = ctx.get_item("tracer") or (ctx.get_item("middleware") or ctx["pin"]).tracer
distributed_headers_config = ctx.get_item("distributed_headers_config")
if distributed_headers_config:
trace_utils.activate_distributed_headers(
tracer, int_config=distributed_headers_config, request_headers=ctx["distributed_headers"]
tracer,
int_config=distributed_headers_config,
request_headers=ctx["distributed_headers"],
override=ctx.get_item("distributed_headers_config_override"),
)
distributed_context = ctx.get_item("distributed_context")
if distributed_context and not call_trace:
@@ -126,6 +129,42 @@ def _start_span(ctx: core.ExecutionContext, call_trace: bool = True, **kwargs) -
return span


def _set_web_frameworks_tags(ctx, span, int_config):
span.set_tag_str(COMPONENT, int_config.integration_name)
span.set_tag_str(SPAN_KIND, SpanKind.SERVER)
span.set_tag(_SPAN_MEASURED_KEY)

analytics_enabled = ctx.get_item("analytics_enabled")
analytics_sample_rate = ctx.get_item("analytics_sample_rate", True)

# Configure trace search sample rate
if (config._analytics_enabled and analytics_enabled is not False) or analytics_enabled is True:
span.set_tag(_ANALYTICS_SAMPLE_RATE_KEY, analytics_sample_rate)


def _on_web_framework_start_request(ctx, int_config):
request_span = ctx.get_item("req_span")
_set_web_frameworks_tags(ctx, request_span, int_config)


def _on_web_framework_finish_request(
span, int_config, method, url, status_code, query, req_headers, res_headers, route, finish
):
trace_utils.set_http_meta(
span=span,
integration_config=int_config,
method=method,
url=url,
status_code=status_code,
query=query,
request_headers=req_headers,
response_headers=res_headers,
route=route,
)
if finish:
span.finish()


def _on_traced_request_context_started_flask(ctx):
current_span = ctx["pin"].tracer.current_span()
if not ctx["pin"].enabled or not current_span:
@@ -761,6 +800,10 @@ def listen():
core.on("azure.functions.request_call_modifier", _on_azure_functions_request_span_modifier)
core.on("azure.functions.start_response", _on_azure_functions_start_response)

# web frameworks general handlers
core.on("web.request.start", _on_web_framework_start_request)
core.on("web.request.finish", _on_web_framework_finish_request)

core.on("test_visibility.enable", _on_test_visibility_enable)
core.on("test_visibility.disable", _on_test_visibility_disable)
core.on("test_visibility.is_enabled", _on_test_visibility_is_enabled, "is_enabled")
@@ -769,6 +812,14 @@ def listen():
core.on("rq.queue.enqueue_job", _propagate_context)

for context_name in (
# web frameworks
"aiohttp.request",
"bottle.request",
"cherrypy.request",
"falcon.request",
"molten.request",
"pyramid.request",
"sanic.request",
"flask.call",
"flask.jsonify",
"flask.render_template",
@@ -779,6 +830,7 @@
"django.template.render",
"django.process_exception",
"django.func.wrapped",
# non web frameworks
"botocore.instrumented_api_call",
"botocore.instrumented_lib_function",
"botocore.patched_kinesis_api_call",
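The new `_on_web_framework_start_request` and `_on_web_framework_finish_request` handlers are wired up through ddtrace's internal event dispatching: `core.on` registers a listener for a named event, and integrations fire that event when a request starts or finishes. A stripped-down sketch of the on/dispatch pattern (illustrative only, not the actual `ddtrace.internal.core` implementation):

```python
# Illustrative sketch of the on/dispatch event pattern the handlers above rely on.
# This is not the real ddtrace.internal.core module, just the shape of the idea.
from collections import defaultdict
from typing import Any, Callable, DefaultDict, List


class EventHub:
    def __init__(self) -> None:
        self._listeners: DefaultDict[str, List[Callable[..., Any]]] = defaultdict(list)

    def on(self, event: str, handler: Callable[..., Any]) -> None:
        # Mirrors core.on("web.request.start", _on_web_framework_start_request)
        self._listeners[event].append(handler)

    def dispatch(self, event: str, *args: Any) -> None:
        # Run every handler registered for this event, in registration order.
        for handler in self._listeners[event]:
            handler(*args)


hub = EventHub()
hub.on("web.request.start", lambda ctx, int_config: print(f"request started: {int_config}"))
hub.dispatch("web.request.start", {"req_span": None}, "falcon")
```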
3 changes: 3 additions & 0 deletions ddtrace/appsec/_ddwaf/ddwaf_types.py
@@ -22,9 +22,12 @@

if system() == "Linux":
try:
asm_config._bypass_instrumentation_for_waf = True
ctypes.CDLL(ctypes.util.find_library("rt"), mode=ctypes.RTLD_GLOBAL)
except Exception: # nosec
pass
finally:
asm_config._bypass_instrumentation_for_waf = False

ARCHI = machine().lower()

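The `try`/`finally` added around the `librt` load guarantees `_bypass_instrumentation_for_waf` is reset even if `ctypes.CDLL` raises. The same guard generalizes to a context manager; a small sketch with a hypothetical config object (not part of ddtrace):

```python
# Generic flag-guard pattern: set a bypass flag, do the risky work,
# and always restore the flag afterwards, mirroring the try/finally above.
from contextlib import contextmanager
from typing import Iterator


class AsmConfig:
    bypass_instrumentation_for_waf: bool = False


@contextmanager
def bypass_waf_instrumentation(config: AsmConfig) -> Iterator[None]:
    config.bypass_instrumentation_for_waf = True
    try:
        yield
    finally:
        # Reset even when the guarded block raises.
        config.bypass_instrumentation_for_waf = False


cfg = AsmConfig()
with bypass_waf_instrumentation(cfg):
    pass  # e.g. load native libraries via ctypes
assert cfg.bypass_instrumentation_for_waf is False
```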
26 changes: 14 additions & 12 deletions ddtrace/appsec/_processor.py
@@ -4,13 +4,19 @@
from json.decoder import JSONDecodeError
import os
import os.path
from typing import TYPE_CHECKING
from typing import Any
from typing import Dict
from typing import List
from typing import Optional
from typing import Set
from typing import Tuple
from typing import Union


if TYPE_CHECKING:
import ddtrace.appsec._ddwaf as ddwaf

import weakref

from ddtrace._trace.processor import SpanProcessor
@@ -167,14 +173,17 @@ def __post_init__(self) -> None:
def delayed_init(self) -> None:
try:
if self._rules is not None and not hasattr(self, "_ddwaf"):
self._ddwaf = ddwaf.DDWaf(
from ddtrace.appsec._ddwaf import DDWaf # noqa: E402
import ddtrace.appsec._metrics as metrics # noqa: E402

self.metrics = metrics
self._ddwaf = DDWaf(
self._rules, self.obfuscation_parameter_key_regexp, self.obfuscation_parameter_value_regexp
)
_set_waf_init_metric(self._ddwaf.info)
self.metrics._set_waf_init_metric(self._ddwaf.info)
except Exception:
# Partial of DDAS-0005-00
log.warning("[DDAS-0005-00] WAF initialization failed")
raise
self._update_required()

def _update_required(self):
@@ -193,7 +202,7 @@ def _update_rules(self, new_rules: Dict[str, Any]) -> bool:
if asm_config._asm_static_rule_file is not None:
return result
result = self._ddwaf.update_rules(new_rules)
_set_waf_updates_metric(self._ddwaf.info)
self.metrics._set_waf_updates_metric(self._ddwaf.info)
self._update_required()
return result

@@ -241,7 +250,7 @@ def waf_callable(custom_data=None, **kwargs):
return self._waf_action(span._local_root or span, ctx, custom_data, **kwargs)

_asm_request_context.set_waf_callback(waf_callable)
_asm_request_context.add_context_callback(_set_waf_request_metrics)
_asm_request_context.add_context_callback(self.metrics._set_waf_request_metrics)
if headers is not None:
_asm_request_context.set_waf_address(SPAN_DATA_NAMES.REQUEST_HEADERS_NO_COOKIES, headers)
_asm_request_context.set_waf_address(
@@ -436,10 +445,3 @@ def on_span_finish(self, span: Span) -> None:
del self._span_to_waf_ctx[s]
except Exception: # nosec B110
pass


# load waf at the end only to avoid possible circular imports with gevent
import ddtrace.appsec._ddwaf as ddwaf # noqa: E402
from ddtrace.appsec._metrics import _set_waf_init_metric # noqa: E402
from ddtrace.appsec._metrics import _set_waf_request_metrics # noqa: E402
from ddtrace.appsec._metrics import _set_waf_updates_metric # noqa: E402
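The main change in this file replaces the module-level `ddtrace.appsec._ddwaf` / `_metrics` imports (previously parked at the bottom of the module to avoid circular imports with gevent) with imports inside `delayed_init`, so the WAF library is only loaded on first use. A generic sketch of that deferred-import pattern, using a stand-in module rather than the real WAF bindings:

```python
# Deferred-import sketch: import a heavy or circular-import-prone dependency
# on first use instead of at module import time.
class Processor:
    def __init__(self) -> None:
        self._waf = None  # filled in lazily by delayed_init()

    def delayed_init(self) -> None:
        if self._waf is None:
            # Importing here defers the cost and breaks potential import cycles.
            import json as waf_stand_in  # stand-in for the real WAF module

            self._waf = waf_stand_in

    def process(self, payload: str):
        self.delayed_init()
        return self._waf.loads(payload)


processor = Processor()
print(processor.process('{"ok": true}'))
```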