-
Notifications
You must be signed in to change notification settings - Fork 516
Expand file tree
/
Copy path.gitlab-ci.yml
More file actions
155 lines (143 loc) · 6.5 KB
/
.gitlab-ci.yml
File metadata and controls
155 lines (143 loc) · 6.5 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
# Pipeline stage ordering; jobs below and in the included files attach to these.
stages:
  - docker-images-reliability-env
  - generate
  - benchmarks
  - macrobenchmarks
  # These benchmarks are planned to replace the legacy macrobenchmarks long term
  - go-go-prof-app-parallel
  - go-go-prof-app-parallel-slo
  - gates
  - config-validation
  - test-apps
# Global variables inherited by every job in this pipeline.
variables:
  # This base image is created here: https://gitlab.ddbuild.io/DataDog/apm-reliability/benchmarking-platform/-/jobs/1577883208
  # Quoted: the value contains ':' and should stay an unambiguous string.
  BASE_CI_IMAGE: "486234852809.dkr.ecr.us-east-1.amazonaws.com/ci/benchmarking-platform:dd-trace-go-106683130"
  INDEX_FILE: index.txt
  # Pipe-separated regex of Go benchmark names consumed by generate_matrix / bp-runner.
  BENCHMARK_TARGETS: "BenchmarkStartRequestSpan|BenchmarkHttpServeTrace|BenchmarkTracerAddSpans|BenchmarkStartSpan|BenchmarkSingleSpanRetention|BenchmarkOTelApiWithCustomTags|BenchmarkInjectW3C|BenchmarkExtractW3C|BenchmarkPartialFlushing|BenchmarkConfig|BenchmarkStartSpanConfig|BenchmarkGraphQL|BenchmarkSampleWAFContext|BenchmarkCaptureStackTrace|BenchmarkSetTagString|BenchmarkSetTagStringPtr|BenchmarkSetTagMetric|BenchmarkSetTagStringer|BenchmarkSerializeSpanLinksInMeta|BenchmarkLogs|BenchmarkParallelLogs|BenchmarkMetrics|BenchmarkParallelMetrics|BenchmarkPayloadVersions|BenchmarkOTLPTraceWriterAdd|BenchmarkOTLPTraceWriterFlush|BenchmarkOTLPProtoMarshal|BenchmarkOTLPProtoSize|BenchmarkOTLPTraceWriterConcurrent"
# Auto-cancel redundant pipelines: a new commit cancels jobs marked interruptible.
workflow:
  auto_cancel:
    on_new_commit: interruptible
# Shared rules anchor for all benchmark jobs:
# - skip release tags, graphite-base branches, and merge-queue working branches;
# - main runs are not interruptible (keep baseline data), all others are.
.benchmark-rules: &benchmark-rules
  - if: '$CI_COMMIT_TAG =~ /^.*\/v\d+\.\d+\.\d+(?:-.*\.\d+)?$/'
    when: never
  - if: '$CI_COMMIT_BRANCH =~ /^graphite-base\/.*$/'
    when: never
  - if: '$CI_COMMIT_BRANCH =~ /^mq-working-branch-.*$/'
    when: never
  - if: '$CI_COMMIT_BRANCH == "main"'
    when: always
    interruptible: false
  - when: always
    interruptible: true
# In order to run benchmarks in parallel, we generate a matrix of test names based on the BENCHMARK_TARGETS variable.
# This will be used in tandem with bp-runner in benchmarks.yml.
# This will allow us to spin up a child job in GitLab CI that handles running all of the benchmarks in parallel.
generate_matrix:
  stage: generate
  image: $BASE_CI_IMAGE
  tags: ["arch:amd64"]
  rules: *benchmark-rules
  script: |
    echo "=== Debug: Environment ==="
    echo "Image: ${BASE_CI_IMAGE}"
    go version
    go env GOTOOLCHAIN GOWORK
    echo "=== End Debug ==="
    cd .gitlab
    GOWORK=off go run generate_config.go
    mv generated_benchmark_matrix.yml ../generated_benchmark_matrix.yml
  artifacts:
    paths:
      - generated_benchmark_matrix.yml
    expire_in: 1 hour  # Artifact is temporary, needed only for the current pipeline
# Launch the generated matrix as a child pipeline; this job's status mirrors
# the child pipeline's status (strategy: depend).
trigger_child_pipeline:
  stage: benchmarks
  trigger:
    include:
      - artifact: generated_benchmark_matrix.yml
        job: generate_matrix
    strategy: depend
  needs:
    - generate_matrix
  rules: *benchmark-rules
  variables:
    # Forwarded so child jobs can locate this parent pipeline's artifacts.
    PARENT_PIPELINE_ID: $CI_PIPELINE_ID
# Fail the pipeline if the benchmark results show a large regression
# (bp-runner fail-on-regression check against the child pipeline's artifacts).
check-big-regressions:
  stage: benchmarks
  needs:
    - job: trigger_child_pipeline
  rules: *benchmark-rules
  when: on_success
  tags:
    - "arch:amd64"
  image: $BASE_CI_IMAGE
  script:
    - |
      pwd
      export ARTIFACTS_DIR="$(pwd)/reports" && (mkdir "${ARTIFACTS_DIR}" || :)
      git config --global url."https://gitlab-ci-token:${CI_JOB_TOKEN}@gitlab.ddbuild.io/DataDog/".insteadOf "https://github.com/DataDog/"
      git clone --branch dd-trace-go https://github.com/DataDog/benchmarking-platform platform && cd platform
      ./steps/download-child-pipeline-artifacts.sh
      pushd "${ARTIFACTS_DIR}/"
      pwd
      bp-runner ../platform/bp-runner.fail-on-regression.yml --debug
  artifacts:
    name: "artifacts"
    when: always
    paths:
      - reports/
    expire_in: "30 days"
# Aggregate the child pipeline's benchmark artifacts, analyze them, and post
# the results as a PR comment. Runs even if upstream failed (when: always).
analyze-benchmark-results:
  stage: benchmarks
  needs:
    - job: trigger_child_pipeline
  rules: *benchmark-rules
  when: always
  tags:
    - "arch:amd64"
  image: $BASE_CI_IMAGE
  script:
    - |
      mkdir -p reports
      export ARTIFACTS_DIR="$(pwd)/reports"
      git config --global url."https://gitlab-ci-token:${CI_JOB_TOKEN}@gitlab.ddbuild.io/DataDog/".insteadOf "https://github.com/DataDog/"
      git clone --branch dd-trace-go https://github.com/DataDog/benchmarking-platform platform && cd platform
      ./steps/download-child-pipeline-artifacts.sh
      echo "All benchmark artifacts collected:"
      ls -la "${ARTIFACTS_DIR}/"
      ./steps/analyze-results.sh
      ./steps/post-pr-comment.sh
  artifacts:
    name: "reports"
    when: always
    paths:
      - reports/
    # Quoted for consistency with "30 days" in check-big-regressions.
    expire_in: "3 months"
# Config Registry CI Jobs
validate_supported_configurations_v2_local_file:
  stage: config-validation
  image: registry.ddbuild.io/ci/libdatadog-build/packaging:100425777
  tags: ["runner:apm-k8s-tweaked-metal"]
  rules:
    - when: on_success
  # Job template provided by the included configuration-central-validation.yml.
  extends: .validate_supported_configurations_v2_local_file
  variables:
    LOCAL_JSON_PATH: internal/env/supported_configurations.json
    # CI/CD variables are strings; quote so YAML does not type this as a boolean.
    BACKFILLED: "true"
update_central_configurations_version_range_v2:
  stage: config-validation
  image: registry.ddbuild.io/ci/libdatadog-build/packaging:100425777
  tags: ["runner:apm-k8s-tweaked-metal"]
  # Job template provided by the included configuration-central-validation.yml.
  extends: .update_central_configurations_version_range_v2
  variables:
    LOCAL_REPO_NAME: "dd-trace-go"
    LOCAL_JSON_PATH: "internal/env/supported_configurations.json"
    LANGUAGE_NAME: "golang"
# Additional pipeline definitions: local job files plus the shared
# go-go-prof-app-parallel benchmarks from the apm-sdks-benchmarks project.
include:
  - local: ".gitlab/macrobenchmarks.yml"
  - local: ".gitlab/test-apps.yml"
  - local: ".gitlab/docker-images-reliability-env.yml"
  - local: ".gitlab/configuration-central-validation.yml"
  - project: 'DataDog/apm-reliability/apm-sdks-benchmarks'
    file: '.gitlab/ci-go-go-prof-app-parallel.yml'
    ref: 'main'