Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
30 changes: 29 additions & 1 deletion .gitlab-ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,7 @@ include:
- .gitlab/post_rc_build/post_rc_tasks.yml
- .gitlab/trigger_distribution/trigger_distribution.yml
- .gitlab/trigger_distribution/conditions.yml
- .gitlab/dynamic_test/include.yml
- .gitlab/setup/setup.yml
- .gitlab/source_test/include.yml
- .gitlab/scan/windows.yml
Expand Down Expand Up @@ -84,6 +85,7 @@ stages:
- e2e_install_packages
- functional_test
- trigger_distribution
- dynamic_test
- junit_upload
- internal_kubernetes_deploy
- post_rc_build
Expand Down Expand Up @@ -154,7 +156,7 @@ variables:
S3_DD_AGENT_OMNIBUS_BTFS_URI: s3://dd-agent-omnibus/btfs
S3_DD_AGENT_OMNIBUS_JAVA_URI: s3://dd-agent-omnibus/openjdk
BTFHUB_ARCHIVE_BRANCH: main
COMPARE_TO_BRANCH: main
COMPARE_TO_BRANCH: kfairise/codecov-e2e-tests
GENERAL_ARTIFACTS_CACHE_BUCKET_URL: https://dd-agent-omnibus.s3.amazonaws.com
S3_DSD6_URI: s3://dsd6-staging

Expand Down Expand Up @@ -626,6 +628,28 @@ workflow:
- release.json
compare_to: $COMPARE_TO_BRANCH

# Skip-list for dynamic test evaluation: every entry resolves to `when: never`,
# so jobs referencing this anchor are suppressed whenever the full e2e suite is
# forced to run (there is nothing to evaluate dynamically in that case).
.except_e2e_main_release_or_rc: # This rule is to not trigger dynamic test evaluation when we force all e2e tests to run
  # e2e tests are globally disabled: nothing to evaluate.
  - <<: *if_disable_e2e_tests
    when: never
  - !reference [.except_mergequeue]
  # All e2e tests are being forced to run.
  - <<: *if_run_all_e2e_tests
    when: never
  # main, release branches and release-candidate tags always run everything.
  - <<: *if_main_branch
    when: never
  - <<: *if_release_branch
    when: never
  - if: $CI_COMMIT_TAG =~ /^[0-9]+\.[0-9]+\.[0-9]+-rc\.[0-9]+$/
    when: never
  # Changes to the e2e test infrastructure itself also force the full suite.
  - changes:
      paths:
        - .gitlab/e2e/e2e.yml
        - test/new-e2e/pkg/**/*
        - test/new-e2e/go.mod
        - flakes.yaml
        - release.json
      compare_to: $COMPARE_TO_BRANCH
    when: never

.on_e2e_or_windows_installer_changes:
- !reference [.on_e2e_main_release_or_rc]
- <<: *if_windows_installer_changes
Expand Down Expand Up @@ -1097,6 +1121,10 @@ workflow:
- <<: *if_coverage_pipeline
when: never

# Run the job unconditionally, but only on coverage pipelines
# (counterpart of .except_coverage_pipeline).
.always_on_coverage_pipeline:
  - <<: *if_coverage_pipeline
    when: always

# This is used to setup utils to report custom datadog-ci spans
.setup-datadog-ci-sections:
- |
Expand Down
3 changes: 3 additions & 0 deletions .gitlab/.ci-linters.yml
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@ needs-rules:
- build_dogstatsd_static-binary_x64
- build_processed_btfhub_archive
- check_already_deployed_version_7
- dynamic_test-evaluate-e2e
- compute_gitlab_ci_config
- dogstatsd_x64_size_test
- go_mod_tidy_check
Expand Down Expand Up @@ -49,10 +50,12 @@ needs-rules:
- trigger_distribution_on_failure
- installer_trigger_auto_staging_release
- installer_trigger_manual_prod_release
- index-consolidation

# Lists jobs that are allowed to not be within JOBOWNERS
job-owners:
allowed-jobs:
- index-consolidation
- benchmark
- build_dogstatsd-binary_arm64
- build_dogstatsd-binary_x64
Expand Down
1 change: 1 addition & 0 deletions .gitlab/JOBOWNERS
Original file line number Diff line number Diff line change
Expand Up @@ -168,6 +168,7 @@ k8s-e2e-cspm-* @DataDog/agent-security
e2e_pre_test* @DataDog/agent-devx
new-e2e* @DataDog/multiple
go_e2e_test_binaries @DataDog/agent-devx
dynamic_test-evaluate-e2e @DataDog/agent-devx

# Kernel matrix testing
upload_dependencies* @DataDog/ebpf-platform
Expand Down
10 changes: 10 additions & 0 deletions .gitlab/dynamic_test/consolidate.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
# Merges the per-job dynamic-test indexes uploaded by e2e jobs into a single
# per-commit index in S3 (invokes dyntest.consolidate-index-in-s3).
# Runs only on coverage pipelines.
index-consolidation:
  stage: dynamic_test
  image: registry.ddbuild.io/ci/datadog-agent-buildimages/linux$CI_IMAGE_LINUX_SUFFIX:$CI_IMAGE_LINUX
  tags: ["arch:arm64"]
  dependencies: []  # no artifacts needed; the index pieces live in S3
  rules:
    - !reference [.always_on_coverage_pipeline]
  script:
    - pip install boto3==1.38.8 # TODO: Remove this before merging, after dda is bumped in test-infra-definitions
    - dda inv -- -e dyntest.consolidate-index-in-s3 --commit-sha $CI_COMMIT_SHA --bucket-uri $S3_PERMANENT_ARTIFACTS_URI
15 changes: 15 additions & 0 deletions .gitlab/dynamic_test/evaluate.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
# Evaluates the consolidated dynamic-test index against this pipeline's
# modified files (invokes dyntest.evaluate-index) and reports stats to Datadog.
# Skipped on coverage pipelines and whenever the full e2e suite is forced.
dynamic_test-evaluate-e2e:
  stage: dynamic_test
  image: registry.ddbuild.io/ci/datadog-agent-buildimages/linux$CI_IMAGE_LINUX_SUFFIX:$CI_IMAGE_LINUX
  tags: ["arch:arm64"]
  dependencies: []  # inputs come from S3 and the Datadog API, not job artifacts
  rules:
    - !reference [.except_coverage_pipeline]
    - !reference [.except_e2e_main_release_or_rc]
    - when: always
  script:
    # API/APP keys are needed by the evaluator to query CI Visibility.
    - DD_API_KEY=$($CI_PROJECT_DIR/tools/ci/fetch_secret.sh $AGENT_API_KEY_ORG2 token) || exit $?; export DD_API_KEY
    - DD_APP_KEY=$($CI_PROJECT_DIR/tools/ci/fetch_secret.sh $AGENT_APP_KEY_ORG2 token) || exit $?; export DD_APP_KEY
    - pip install boto3==1.38.8 # TODO: Remove this before merging, after dda is bumped in test-infra-definitions
    - dda inv -- -e dyntest.evaluate-index --bucket-uri $S3_PERMANENT_ARTIFACTS_URI --commit-sha $CI_COMMIT_SHA --pipeline-id $CI_PIPELINE_ID
  allow_failure: true # Here to evaluate the system, should not impact the CI
3 changes: 3 additions & 0 deletions .gitlab/dynamic_test/include.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
# Entry point for the dynamic_test stage: pulls in both job definitions.
include:
  - .gitlab/dynamic_test/consolidate.yml
  - .gitlab/dynamic_test/evaluate.yml
2 changes: 2 additions & 0 deletions .gitlab/e2e/e2e.yml
Original file line number Diff line number Diff line change
Expand Up @@ -90,6 +90,8 @@
- |
if [ -d "$E2E_COVERAGE_OUT_DIR" ]; then
dda inv -- -e coverage.process-e2e-coverage-folders $E2E_COVERAGE_OUT_DIR
pip install boto3==1.38.8 # TODO: Remove this before merging, after dda is bumped in test-infra-definitions
dda inv -- -e dyntest.compute-and-upload-job-index --bucket-uri $S3_PERMANENT_ARTIFACTS_URI --coverage-folder $E2E_COVERAGE_OUT_DIR --commit-sha $CI_COMMIT_SHA --job-id $CI_JOB_ID
fi
artifacts:
expire_in: 2 weeks
Expand Down
2 changes: 1 addition & 1 deletion pkg/gpu/cgroups.go
Original file line number Diff line number Diff line change
Expand Up @@ -67,7 +67,7 @@ const (
// getAbsoluteCgroupForProcess gets the absolute cgroup path for a process independently of whether
// we are inside a container or not, or of the cgroup version being used.
// rootfs is the root filesystem path (usually /, but can be different to allow unit testing)
// hostRoot is the path to the host root filesystem, relative to rootfs
// hostRoot is the path to the host root filesystem, relative to rootfs
// currentProcessPid is the PID of the process currently running (os.Getpid(), but can be different for unit testing)
// targetProcessPid is the PID of the process whose cgroup we want to get
func getAbsoluteCgroupForProcess(rootfs, hostRoot string, currentProcessPid, targetProcessPid uint32, cgroupMode cgroups.CGMode) (string, error) {
Expand Down
2 changes: 2 additions & 0 deletions tasks/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@
diff,
docker_tasks,
dogstatsd,
dyntest,
ebpf,
emacs,
epforwarder,
Expand Down Expand Up @@ -184,6 +185,7 @@
ns.add_collection(emacs)
ns.add_collection(vim)
ns.add_collection(macos)
ns.add_collection(dyntest)
ns.add_collection(epforwarder)
ns.add_collection(fips)
ns.add_collection(go)
Expand Down
43 changes: 43 additions & 0 deletions tasks/dyntest.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,43 @@
"""
Invoke task to handle dynamic tests.
"""

import os

from invoke import Context, task

from tasks.libs.common.git import get_modified_files
from tasks.libs.dynamic_test.backend import S3Backend
from tasks.libs.dynamic_test.evaluator import DatadogDynTestEvaluator
from tasks.libs.dynamic_test.executor import DynTestExecutor
from tasks.libs.dynamic_test.index import IndexKind
from tasks.libs.dynamic_test.indexers.e2e import CoverageDynTestIndexer


@task
def compute_and_upload_job_index(ctx: Context, bucket_uri: str, coverage_folder: str, commit_sha: str, job_id: str):
    """Compute a package-level dynamic-test index from an e2e coverage folder and upload it to S3.

    The index is stored under the ``<commit_sha>/<job_id>`` key so that the
    per-job indexes of a commit can later be consolidated into a single index.
    """
    backend = S3Backend(bucket_uri)
    job_index = CoverageDynTestIndexer(coverage_folder).compute_index(ctx)
    backend.upload_index(job_index, IndexKind.PACKAGE, f"{commit_sha}/{job_id}")


@task
def consolidate_index_in_s3(_: Context, bucket_uri: str, commit_sha: str):
    """Merge all per-job indexes uploaded for *commit_sha* into one per-commit index in S3."""
    backend = S3Backend(bucket_uri)
    merged = backend.consolidate_index(IndexKind.PACKAGE, commit_sha)
    backend.upload_index(merged, IndexKind.PACKAGE, commit_sha)


@task
def evaluate_index(ctx: Context, bucket_uri: str, commit_sha: str, pipeline_id: str):
    """Evaluate the consolidated dynamic-test index against this branch's modified files.

    Prints a summary of the evaluation and pushes the resulting stats to Datadog.
    Bails out early (without raising) if the index cannot be initialized.
    """
    backend = S3Backend(bucket_uri)
    executor = DynTestExecutor(ctx, backend, IndexKind.PACKAGE, commit_sha)
    evaluator = DatadogDynTestEvaluator(ctx, IndexKind.PACKAGE, executor, pipeline_id)
    if not evaluator.initialize():
        print("Failed to initialize index")
        return
    # The index is keyed by package (directory), not by file.
    modified_dirs = [os.path.dirname(path) for path in get_modified_files(ctx)]
    results = evaluator.evaluate(modified_dirs)
    evaluator.print_summary(results)
    evaluator.send_stats_to_datadog(results)
42 changes: 42 additions & 0 deletions tasks/libs/common/datadog_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -120,3 +120,45 @@ def get_ci_pipeline_events(query, days):
page_limit=5,
)
return response


def get_ci_test_events(query, days):
    """
    Fetch test events from the Datadog CI Visibility API.

    query: CI Visibility search query used to filter test events.
    days: size of the lookback window, in days, ending now.

    Returns a list containing the events of every result page; pagination is
    followed automatically via the cursor in the response metadata.
    """
    from datadog_api_client import ApiClient, Configuration
    from datadog_api_client.v2.api.ci_visibility_tests_api import CIVisibilityTestsApi

    configuration = Configuration()
    all_events = []
    page_cursor = None

    # Freeze the time window once: re-evaluating datetime.now() on every page
    # would shift the window between requests and could drop or duplicate
    # events across pages.
    filter_to = datetime.now()
    filter_from = filter_to - timedelta(days=days)

    with ApiClient(configuration) as api_client:
        api = CIVisibilityTestsApi(api_client)

        while True:
            # Filter test events matching `query` within the frozen window.
            kwargs = {
                "filter_query": query,
                "page_limit": 1000,
                "filter_from": filter_from,
                "filter_to": filter_to,
            }
            if page_cursor:
                kwargs["page_cursor"] = page_cursor

            response = api.list_ci_app_test_events(**kwargs)
            # Add events from this page to our collection
            if hasattr(response, 'data') and response.data:
                all_events.extend(response.data)

            # Check if there are more pages
            if hasattr(response, 'meta') and hasattr(response.meta, 'page') and hasattr(response.meta.page, 'after'):
                page_cursor = response.meta.page.after
                if not page_cursor:  # No more pages
                    break
            else:
                break  # No pagination metadata, assume single page

    return all_events
Loading
Loading