
Merge branch 'main' into sanchda/fix_ddup_tags
sanchda committed Apr 30, 2024
2 parents 3b805e3 + 97af079 · commit 17acfed
Showing 150 changed files with 7,662 additions and 2,012 deletions.
17 changes: 1 addition & 16 deletions .circleci/config.templ.yml
@@ -14,7 +14,7 @@ mysql_image: &mysql_image mysql:5.7@sha256:03b6dcedf5a2754da00e119e2cc6094ed3c88
 postgres_image: &postgres_image postgres:12-alpine@sha256:c6704f41eb84be53d5977cb821bf0e5e876064b55eafef1e260c2574de40ad9a
 mongo_image: &mongo_image mongo:3.6@sha256:19c11a8f1064fd2bb713ef1270f79a742a184cd57d9bb922efdd2a8eca514af8
 httpbin_image: &httpbin_image kennethreitz/httpbin@sha256:2c7abc4803080c22928265744410173b6fea3b898872c01c5fd0f0f9df4a59fb
-vertica_image: &vertica_image sumitchawla/vertica:latest
+vertica_image: &vertica_image vertica/vertica-ce:latest
 rabbitmq_image: &rabbitmq_image rabbitmq:3.7-alpine
 testagent_image: &testagent_image ghcr.io/datadog/dd-apm-test-agent/ddapm-test-agent:v1.16.0
 
@@ -1227,21 +1227,6 @@ jobs:
       snapshot: true
       docker_services: "httpbin_local"
 
-  vertica:
-    <<: *contrib_job
-    docker:
-      - image: *ddtrace_dev_image
-      - *testagent
-      - image: *vertica_image
-        environment:
-          - VP_TEST_USER=dbadmin
-          - VP_TEST_PASSWORD=abc123
-          - VP_TEST_DATABASE=docker
-    steps:
-      - run_test:
-          wait: vertica
-          pattern: 'vertica'
-
   wsgi:
     <<: *machine_executor
     steps:
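For context, the deleted `vertica` job ran against a container configured with the `VP_TEST_*` credentials above. A minimal local stand-in, assuming the deleted job's credentials, the `vertica-python` client, and Vertica's default client port 5433 (none of which this diff confirms), might look like:

```python
# Hypothetical local stand-in for the removed CI job's database setup;
# credentials mirror the deleted VP_TEST_* environment variables.
import vertica_python

conn_info = {
    "host": "127.0.0.1",
    "port": 5433,  # Vertica's usual client port (assumption; not in the diff)
    "user": "dbadmin",
    "password": "abc123",
    "database": "docker",
}

with vertica_python.connect(**conn_info) as conn:
    cur = conn.cursor()
    cur.execute("SELECT version()")
    print(cur.fetchone())
```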
14 changes: 7 additions & 7 deletions .github/CODEOWNERS
@@ -4,13 +4,13 @@
 * @DataDog/apm-core-python
 
 # Framework Integrations
-ddtrace/ext/ @DataDog/apm-core-python @DataDog/apm-framework-integrations
-ddtrace/contrib/ @DataDog/apm-core-python @DataDog/apm-framework-integrations
-ddtrace/internal/schema/ @DataDog/apm-core-python @DataDog/apm-framework-integrations
-tests/contrib/ @DataDog/apm-core-python @DataDog/apm-framework-integrations
-tests/internal/peer_service @DataDog/apm-core-python @DataDog/apm-framework-integrations
-tests/internal/service_name @DataDog/apm-core-python @DataDog/apm-framework-integrations
-tests/contrib/grpc @DataDog/apm-framework-integrations @DataDog/asm-python
+ddtrace/ext/ @DataDog/apm-core-python @DataDog/apm-idm-python
+ddtrace/contrib/ @DataDog/apm-core-python @DataDog/apm-idm-python
+ddtrace/internal/schema/ @DataDog/apm-core-python @DataDog/apm-idm-python
+tests/contrib/ @DataDog/apm-core-python @DataDog/apm-idm-python
+tests/internal/peer_service @DataDog/apm-core-python @DataDog/apm-idm-python
+tests/internal/service_name @DataDog/apm-core-python @DataDog/apm-idm-python
+tests/contrib/grpc @DataDog/apm-idm-python @DataDog/asm-python
 
 # Files which can be approved by anyone
 # DEV: This helps not requiring apm-core-python to review new files added
12 changes: 0 additions & 12 deletions .github/workflows/build_deploy.yml
@@ -9,18 +9,6 @@ on:
       # before merging/releasing
       - build_deploy*
   pull_request:
-    paths:
-      - ".github/workflows/build_deploy.yml"
-      - ".github/workflows/build_python_3.yml"
-      - "setup.py"
-      - "setup.cfg"
-      - "pyproject.toml"
-      - "**.c"
-      - "**.h"
-      - "**.cpp"
-      - "**.hpp"
-      - "**.pyx"
-      - "ddtrace/vendor/**"
   release:
     types:
       - published
51 changes: 49 additions & 2 deletions .github/workflows/build_python_3.yml
@@ -21,7 +21,7 @@ jobs:
       include:
         - os: ubuntu-latest
           archs: x86_64 i686
-        - os: ubuntu-latest
+        - os: arm-4core-linux
           archs: aarch64
         - os: windows-latest
           archs: AMD64 x86
@@ -34,17 +34,63 @@ jobs:
           fetch-depth: 0
 
       - uses: actions/setup-python@v4
+        if: matrix.os != 'arm-4core-linux'
         name: Install Python
         with:
           python-version: '3.8'
 
+      - name: Install docker and pipx
+        if: matrix.os == 'arm-4core-linux'
+        # The ARM64 Ubuntu image has fewer things installed by default.
+        # We need docker, pip and venv for cibuildwheel;
+        # acl allows us to use docker in the same session.
+        run: |
+          curl -fsSL https://get.docker.com -o get-docker.sh
+          sudo sh get-docker.sh
+          sudo usermod -a -G docker $USER
+          sudo apt install -y acl python3.10-venv python3-pip
+          sudo setfacl --modify user:runner:rw /var/run/docker.sock
+          python3 -m pip install pipx
+
       - name: Set up QEMU
-        if: runner.os == 'Linux'
+        if: runner.os == 'Linux' && matrix.os != 'arm-4core-linux'
         uses: docker/setup-qemu-action@v2
         with:
           platforms: all
 
+      - name: Build wheels arm64
+        if: matrix.os == 'arm-4core-linux'
+        run: /home/runner/.local/bin/pipx run cibuildwheel==2.16.5 --platform linux
+        env:
+          # configure cibuildwheel to build native archs ('auto'), and some
+          # emulated ones
+          CIBW_ARCHS: ${{ matrix.archs }}
+          CIBW_BUILD: ${{ inputs.cibw_build }}
+          CIBW_SKIP: ${{ inputs.cibw_skip }}
+          CIBW_PRERELEASE_PYTHONS: ${{ inputs.cibw_prerelease_pythons }}
+          CMAKE_BUILD_PARALLEL_LEVEL: 12
+          CIBW_REPAIR_WHEEL_COMMAND_LINUX: |
+            mkdir ./tempwheelhouse &&
+            unzip -l {wheel} | grep '\.so' &&
+            auditwheel repair -w ./tempwheelhouse {wheel} &&
+            (yum install -y zip || apk add zip) &&
+            for w in ./tempwheelhouse/*.whl; do
+              zip -d $w \*.c \*.cpp \*.cc \*.h \*.hpp \*.pyx
+              mv $w {dest_dir}
+            done &&
+            rm -rf ./tempwheelhouse
+          CIBW_REPAIR_WHEEL_COMMAND_MACOS: |
+            zip -d {wheel} \*.c \*.cpp \*.cc \*.h \*.hpp \*.pyx &&
+            delocate-wheel --require-archs {delocate_archs} -w {dest_dir} -v {wheel}
+          CIBW_REPAIR_WHEEL_COMMAND_WINDOWS:
+            choco install -y 7zip &&
+            7z d -r "{wheel}" *.c *.cpp *.cc *.h *.hpp *.pyx &&
+            move "{wheel}" "{dest_dir}"
+          # DEV: Uncomment to debug MacOS
+          # CIBW_BUILD_VERBOSITY_MACOS: 3
+
       - name: Build wheels
+        if: matrix.os != 'arm-4core-linux'
         uses: pypa/cibuildwheel@v2.16.5
         env:
           # configure cibuildwheel to build native archs ('auto'), and some
@@ -73,6 +119,7 @@
move "{wheel}" "{dest_dir}"
# DEV: Uncomment to debug MacOS
# CIBW_BUILD_VERBOSITY_MACOS: 3

- uses: actions/upload-artifact@v3
with:
path: ./wheelhouse/*.whl
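The `CIBW_REPAIR_WHEEL_COMMAND_*` values in this workflow strip C/C++/Cython sources out of the built wheels with `zip -d` (or `7z d` on Windows). As a rough illustration of that post-processing step, the same cleanup in Python would be (a hypothetical helper, not part of the workflow):

```python
# Illustrative only: remove source files from a wheel, mimicking `zip -d`.
import os
import shutil
import tempfile
import zipfile

SOURCE_SUFFIXES = (".c", ".cpp", ".cc", ".h", ".hpp", ".pyx")


def strip_sources(wheel_path: str) -> None:
    """Rewrite a wheel in place without its C/C++/Cython source members."""
    fd, tmp_path = tempfile.mkstemp(suffix=".whl")
    os.close(fd)
    with zipfile.ZipFile(wheel_path) as src, zipfile.ZipFile(
        tmp_path, "w", zipfile.ZIP_DEFLATED
    ) as dst:
        for info in src.infolist():
            # Copy every member except build-time sources.
            if not info.filename.endswith(SOURCE_SUFFIXES):
                dst.writestr(info, src.read(info.filename))
    shutil.move(tmp_path, wheel_path)
```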
4 changes: 4 additions & 0 deletions .gitlab-ci.yml
@@ -3,10 +3,12 @@ stages:
   - deploy
   - benchmarks
   - benchmarks-pr-comment
+  - macrobenchmarks
 
 include:
   - remote: https://gitlab-templates.ddbuild.io/apm/packaging.yml
   - local: ".gitlab/benchmarks.yml"
+  - local: ".gitlab/macrobenchmarks.yml"
 
 variables:
   DOWNSTREAM_BRANCH:
@@ -30,6 +32,7 @@ package:
       when: on_success
   script:
     - ../.gitlab/build-deb-rpm.sh
+    - find . -iregex '.*\.\(deb\|rpm\)' -printf '%f\0' | xargs -0 dd-pkg lint
 
 package-arm:
   extends: .package-arm
@@ -40,6 +43,7 @@ package-arm:
       when: on_success
   script:
     - ../.gitlab/build-deb-rpm.sh
+    - find . -iregex '.*\.\(deb\|rpm\)' -printf '%f\0' | xargs -0 dd-pkg lint
 
 .release-package:
   stage: deploy
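The added `find … | xargs -0 dd-pkg lint` line feeds every built `.deb`/`.rpm` filename, NUL-delimited, to the `dd-pkg` linter. A rough Python equivalent of that shell pipeline (illustrative only; the CI jobs use the shell form above):

```python
# Illustrative equivalent of:
#   find . -iregex '.*\.\(deb\|rpm\)' -printf '%f\0' | xargs -0 dd-pkg lint
import pathlib
import subprocess

packages = [
    p.name
    for p in pathlib.Path(".").rglob("*")
    if p.suffix.lower() in (".deb", ".rpm")
]
if packages:
    subprocess.run(["dd-pkg", "lint", *packages], check=True)
```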
86 changes: 86 additions & 0 deletions .gitlab/macrobenchmarks.yml
@@ -0,0 +1,86 @@
+variables:
+  BASE_CI_IMAGE: 486234852809.dkr.ecr.us-east-1.amazonaws.com/ci/benchmarking-platform:dd-trace-py-macrobenchmarks
+
+.macrobenchmarks:
+  stage: macrobenchmarks
+  needs: []
+  tags: ["runner:apm-k8s-same-cpu"]
+  timeout: 1h
+  rules:
+    - if: $CI_PIPELINE_SOURCE == "schedule"
+      when: always
+    - when: manual
+  ## Next step, enable:
+  # - if: $CI_COMMIT_REF_NAME == "main"
+  #   when: always
+  # If you have a problem with Gitlab cache, see Troubleshooting section in Benchmarking Platform docs
+  image: $BENCHMARKS_CI_IMAGE
+  script: |
+    git clone --branch python/macrobenchmarks https://gitlab-ci-token:${CI_JOB_TOKEN}@gitlab.ddbuild.io/DataDog/benchmarking-platform platform && cd platform
+    if [ "$BP_PYTHON_SCENARIO_DIR" == "flask-realworld" ]; then
+      bp-runner bp-runner.flask-realworld.yml --debug
+    else
+      bp-runner bp-runner.simple.yml --debug
+    fi
+  artifacts:
+    name: "artifacts"
+    when: always
+    paths:
+      - platform/artifacts/
+    expire_in: 3 months
+  variables:
+    # Benchmark's env variables. Modify to tweak benchmark parameters.
+    DD_TRACE_DEBUG: "false"
+    DD_RUNTIME_METRICS_ENABLED: "true"
+    DD_REMOTE_CONFIGURATION_ENABLED: "false"
+    DD_INSTRUMENTATION_TELEMETRY_ENABLED: "false"
+
+    K6_OPTIONS_NORMAL_OPERATION_RATE: 40
+    K6_OPTIONS_NORMAL_OPERATION_DURATION: 5m
+    K6_OPTIONS_NORMAL_OPERATION_GRACEFUL_STOP: 1m
+    K6_OPTIONS_NORMAL_OPERATION_PRE_ALLOCATED_VUS: 4
+    K6_OPTIONS_NORMAL_OPERATION_MAX_VUS: 4
+
+    K6_OPTIONS_HIGH_LOAD_RATE: 500
+    K6_OPTIONS_HIGH_LOAD_DURATION: 1m
+    K6_OPTIONS_HIGH_LOAD_GRACEFUL_STOP: 30s
+    K6_OPTIONS_HIGH_LOAD_PRE_ALLOCATED_VUS: 4
+    K6_OPTIONS_HIGH_LOAD_MAX_VUS: 4
+
+    # Gitlab and BP specific env vars. Do not modify.
+    FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY: "true"
+
+  # Workaround: Currently we're not running the benchmarks on every PR, but GitHub still shows them as pending.
+  # By marking the benchmarks as allow_failure, this should go away. (This workaround should be removed once the
+  # benchmarks get changed to run on every PR)
+  allow_failure: true
+
+macrobenchmarks:
+  extends: .macrobenchmarks
+  parallel:
+    matrix:
+      - DD_BENCHMARKS_CONFIGURATION: baseline
+        BP_PYTHON_SCENARIO_DIR: flask-realworld
+        DDTRACE_INSTALL_VERSION: "git+https://github.com/Datadog/dd-trace-py@${CI_COMMIT_SHA}"
+
+      - DD_BENCHMARKS_CONFIGURATION: only-tracing
+        BP_PYTHON_SCENARIO_DIR: flask-realworld
+        DDTRACE_INSTALL_VERSION: "git+https://github.com/Datadog/dd-trace-py@${CI_COMMIT_SHA}"
+
+      - DD_BENCHMARKS_CONFIGURATION: only-tracing
+        BP_PYTHON_SCENARIO_DIR: flask-realworld
+        DDTRACE_INSTALL_VERSION: "git+https://github.com/Datadog/dd-trace-py@${CI_COMMIT_SHA}"
+        DD_REMOTE_CONFIGURATION_ENABLED: "false"
+        DD_INSTRUMENTATION_TELEMETRY_ENABLED: "true"
+
+      - DD_BENCHMARKS_CONFIGURATION: only-tracing
+        BP_PYTHON_SCENARIO_DIR: flask-realworld
+        DDTRACE_INSTALL_VERSION: "git+https://github.com/Datadog/dd-trace-py@${CI_COMMIT_SHA}"
+        DD_REMOTE_CONFIGURATION_ENABLED: "false"
+        DD_INSTRUMENTATION_TELEMETRY_ENABLED: "false"
+
+      - DD_BENCHMARKS_CONFIGURATION: only-tracing
+        BP_PYTHON_SCENARIO_DIR: flask-realworld
+        DDTRACE_INSTALL_VERSION: "git+https://github.com/Datadog/dd-trace-py@${CI_COMMIT_SHA}"
+        DD_REMOTE_CONFIGURATION_ENABLED: "true"
+        DD_INSTRUMENTATION_TELEMETRY_ENABLED: "true"
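Each matrix entry pins the exact commit under test through `DDTRACE_INSTALL_VERSION`. A sketch of the equivalent install outside CI (assuming `pip`, and substituting a real SHA when `CI_COMMIT_SHA` is unset):

```python
# Sketch: install the same ddtrace build the benchmark pins via DDTRACE_INSTALL_VERSION.
import os
import subprocess

sha = os.environ.get("CI_COMMIT_SHA", "main")  # fall back to main outside CI
subprocess.run(
    ["pip", "install", f"git+https://github.com/Datadog/dd-trace-py@{sha}"],
    check=True,
)
```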
32 changes: 32 additions & 0 deletions CHANGELOG.md
@@ -2,6 +2,38 @@
 
 Changelogs for versions not listed here can be found at https://github.com/DataDog/dd-trace-py/releases
 
+---
+
+## 2.7.10
+
+### Bug Fixes
+
+- Code Security: This fix solves an issue with f-strings where formatting was not applied to int parameters
+- logging: This fix resolves an issue where `tracer.get_log_correlation_context()` incorrectly returned a 128-bit trace_id even with `DD_TRACE_128_BIT_TRACEID_LOGGING_ENABLED` set to `False` (the default), breaking log correlation. It now returns a 64-bit trace_id.
+- profiling: Fixes a defect where the deprecated path to the Datadog span type was used by the profiler.
+
+---
+
+## 2.8.3
+
+
+### Bug Fixes
+
+- Code Security: This fix solves an issue with f-strings where formatting was not applied to int parameters
+- logging: This fix resolves an issue where `tracer.get_log_correlation_context()` incorrectly returned a 128-bit trace_id even with `DD_TRACE_128_BIT_TRACEID_LOGGING_ENABLED` set to `False` (the default), breaking log correlation. It now returns a 64-bit trace_id.
+- profiling: Fixes a defect where the deprecated path to the Datadog span type was used by the profiler.
+
+
+---
+
+## 2.6.12
+
+
+### Bug Fixes
+
+- Code Security: This fix solves an issue with f-strings where formatting was not applied to int parameters
+
+
 ---
 
 ## 2.8.2
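To illustrate the logging entry in the changelog above: with `DD_TRACE_128_BIT_TRACEID_LOGGING_ENABLED` left at its default of `False`, the correlation context should carry a 64-bit trace_id after the fix. A minimal sketch of the affected API (assuming a configured tracer; not taken from this diff):

```python
# Minimal sketch of the API the logging fix concerns.
from ddtrace import tracer

with tracer.trace("demo"):
    ctx = tracer.get_log_correlation_context()
    # With DD_TRACE_128_BIT_TRACEID_LOGGING_ENABLED unset (default: false),
    # the fix restores a 64-bit trace_id here, suitable for log correlation.
    print(ctx["trace_id"], ctx["span_id"])
```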
5 changes: 2 additions & 3 deletions benchmarks/appsec_iast_propagation/scenario.py
@@ -1,8 +1,7 @@
 from typing import Any  # noqa:F401
 
 import bm
-
-from tests.utils import override_env
+from bm.utils import override_env
 
 
 with override_env({"DD_IAST_ENABLED": "True"}):
@@ -42,7 +41,7 @@ def aspect_function(internal_loop, tainted):
value = ""
res = value
for _ in range(internal_loop):
res = add_aspect(res, join_aspect(str.join, 1, "_", (tainted, "_", tainted)))
res = add_aspect(res, join_aspect("_".join, 1, "_", (tainted, "_", tainted)))
value = res
res = add_aspect(res, tainted)
value = res
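For context, `add_aspect` and `join_aspect` are IAST's taint-propagating wrappers around `+` and `str.join`. Minus the taint tracking, the benchmark's inner loop is roughly equivalent to this plain-Python version (a sketch for orientation, not the instrumented path):

```python
def plain_equivalent(internal_loop: int, tainted: str) -> str:
    # Same string-building work as aspect_function, without IAST taint tracking.
    res = ""
    for _ in range(internal_loop):
        res += "_".join((tainted, "_", tainted))
    res += tainted
    return res
```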
(Diff truncated: the remaining 142 of the 150 changed files are not shown.)
