Commit ac85475

Merge branch 'master' into andrea.marziali/complete-process-tags
2 parents: db037c4 + 4c50454

File tree: 12 files changed, +92 −79 lines

.github/workflows/README.md

Lines changed: 1 addition & 1 deletion
@@ -106,7 +106,7 @@ _Recovery:_ Manually trigger the action again.
 
 ### analyze-changes [🔗](analyze-changes.yaml)
 
-_Trigger:_ When pushing commits to `master` or any pull request targeting `master`.
+_Trigger:_ When pushing commits to `master`.
 
 _Action:_
 
.github/workflows/analyze-changes.yaml

Lines changed: 2 additions & 0 deletions
@@ -61,6 +61,8 @@ jobs:
 
   trivy:
     name: Analyze changes with Trivy
+    # Don’t run on PR, only when pushing to master
+    if: github.event_name == 'push' && github.ref == 'refs/heads/master'
     runs-on: ubuntu-latest
     permissions:
       actions: read

.gitlab-ci.yml

Lines changed: 1 addition & 1 deletion
@@ -24,7 +24,7 @@ variables:
   BUILD_JOB_NAME: "build"
   DEPENDENCY_CACHE_POLICY: pull
   BUILD_CACHE_POLICY: pull
-  GRADLE_VERSION: "8.4" # must match gradle-wrapper.properties
+  GRADLE_VERSION: "8.5" # must match gradle-wrapper.properties
   MAVEN_REPOSITORY_PROXY: "http://artifactual.artifactual.all-clusters.local-dc.fabric.dog:8081/repository/maven-central/"
   GRADLE_PLUGIN_PROXY: "http://artifactual.artifactual.all-clusters.local-dc.fabric.dog:8081/repository/gradle-plugin-portal-proxy/"
   JAVA_BUILD_IMAGE_VERSION: "v25.05"

.gitlab/benchmarks.yml

Lines changed: 0 additions & 1 deletion
@@ -14,7 +14,6 @@
     - when: on_success
   script:
     - export ARTIFACTS_DIR="$(pwd)/reports" && mkdir -p "${ARTIFACTS_DIR}"
-    - export CIRCLE_CI_TOKEN=$(aws ssm get-parameter --region us-east-1 --name ci.dd-trace-java.circleci_token --with-decryption --query "Parameter.Value" --out text)
    - git config --global url."https://gitlab-ci-token:${CI_JOB_TOKEN}@gitlab.ddbuild.io/DataDog/".insteadOf "https://github.com/DataDog/"
    - git clone --branch dd-trace-java/tracer-benchmarks https://github.com/DataDog/benchmarking-platform.git /platform && cd /platform
  artifacts:

dd-java-agent/agent-debugger/src/main/java/com/datadog/debugger/sink/SymbolSink.java

Lines changed: 30 additions & 56 deletions
@@ -105,16 +105,37 @@ public void flush() {
     String json =
         SERVICE_VERSION_ADAPTER.toJson(
             new ServiceVersion(serviceName, env, version, "JAVA", scopesToSerialize));
-    if (json.length() > maxPayloadSize) {
-      LOGGER.debug(
-          "Upload split is required for {} scopes: {}/{}",
-          scopesToSerialize.size(),
-          json.length(),
-          maxPayloadSize);
-      splitAndSend(scopesToSerialize);
+    updateStats(scopesToSerialize, json);
+    doUpload(scopesToSerialize, json);
+  }
+
+  private void doUpload(List<Scope> scopesToSerialize, String json) {
+    byte[] jsonBytes = json.getBytes(StandardCharsets.UTF_8);
+    byte[] payload = null;
+    if (isCompressed) {
+      payload = compressPayload(jsonBytes);
+    }
+    if (payload == null) {
+      if (json.length() > maxPayloadSize) {
+        LOGGER.warn("Payload is too big: {}/{}", json.length(), maxPayloadSize);
+        splitAndSend(scopesToSerialize);
+        return;
+      }
+      symbolUploader.uploadAsMultipart(
+          "",
+          event,
+          new BatchUploader.MultiPartContent(jsonBytes, "file", "file.json", APPLICATION_JSON));
     } else {
-      LOGGER.debug("Sending {} jar scopes size={}", scopesToSerialize.size(), json.length());
-      doUpload(scopesToSerialize, json);
+      if (payload.length > maxPayloadSize) {
+        LOGGER.warn("Compressed payload is too big: {}/{}", payload.length, maxPayloadSize);
+        splitAndSend(scopesToSerialize);
+        return;
+      }
+      LOGGER.debug("Sending {} jar scopes size={}", scopesToSerialize.size(), payload.length);
+      symbolUploader.uploadAsMultipart(
+          "",
+          event,
+          new BatchUploader.MultiPartContent(payload, "file", "file.gz", APPLICATION_GZIP));
     }
   }
 
@@ -146,16 +167,6 @@ private void splitAndSend(List<Scope> scopesToSerialize) {
            SERVICE_VERSION_ADAPTER.toJson(
                new ServiceVersion(
                    serviceName, env, version, "JAVA", Collections.singletonList(scope)));
-        if (json.length() > maxPayloadSize) {
-          // this jar scope is still too big, split it by classes
-          LOGGER.debug(
-              "Upload split is required for jar scope {}: {}/{}",
-              scope.getName(),
-              json.length(),
-              maxPayloadSize);
-          splitAndSend(Collections.singletonList(scope));
-          continue;
-        }
        LOGGER.debug("Sending {} jar scope size={}", scope.getName(), json.length());
        doUpload(Collections.singletonList(scope), json);
      }
@@ -168,22 +179,10 @@ private void splitAndSend(List<Scope> scopesToSerialize) {
        String jsonFirstHalf =
            SERVICE_VERSION_ADAPTER.toJson(
                new ServiceVersion(serviceName, env, version, "JAVA", firstHalf));
-        if (jsonFirstHalf.length() > maxPayloadSize) {
-          LOGGER.warn(
-              "Cannot split jar scope list in 2, first half is too big: {}",
-              jsonFirstHalf.length());
-          return;
-        }
        doUpload(firstHalf, jsonFirstHalf);
        String jsonSecondHalf =
            SERVICE_VERSION_ADAPTER.toJson(
                new ServiceVersion(serviceName, env, version, "JAVA", secondHalf));
-        if (jsonSecondHalf.length() > maxPayloadSize) {
-          LOGGER.warn(
-              "Cannot split jar scope list in 2, second half is too big: {}",
-              jsonSecondHalf.length());
-          return;
-        }
        doUpload(secondHalf, jsonSecondHalf);
      }
    } else {
@@ -213,31 +212,6 @@ private void splitAndSend(List<Scope> scopesToSerialize) {
     }
   }
 
-  private void doUpload(List<Scope> scopes, String json) {
-    updateStats(scopes, json);
-    byte[] jsonBytes = json.getBytes(StandardCharsets.UTF_8);
-    byte[] payload = null;
-    if (isCompressed) {
-      payload = compressPayload(jsonBytes);
-    }
-    if (payload == null) {
-      if (jsonBytes.length > maxPayloadSize) {
-        LOGGER.warn("Compressed payload is too big: {}/{}", payload.length, maxPayloadSize);
-        splitAndSend(scopes);
-        return;
-      }
-      symbolUploader.uploadAsMultipart(
-          "",
-          event,
-          new BatchUploader.MultiPartContent(jsonBytes, "file", "file.json", APPLICATION_JSON));
-    } else {
-      symbolUploader.uploadAsMultipart(
-          "",
-          event,
-          new BatchUploader.MultiPartContent(payload, "file", "file.gz", APPLICATION_GZIP));
-    }
-  }
-
   private static Scope createJarScope(String jarName, List<Scope> classScopes) {
     return Scope.builder(ScopeType.JAR, jarName, 0, 0).name(jarName).scopes(classScopes).build();
   }
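
In the new doUpload, the size check runs against the gzipped bytes when compression is enabled, and the uncompressed branch is only taken when compressPayload returns null. The helper's body is not part of this diff; below is a minimal sketch of what such a helper could look like, assuming it is built on java.util.zip (the repository's actual implementation may differ).

  // Hypothetical sketch (not from this diff): gzip the serialized JSON and
  // return null on failure so doUpload can fall back to the raw-JSON branch.
  private static byte[] compressPayload(byte[] jsonBytes) {
    java.io.ByteArrayOutputStream out = new java.io.ByteArrayOutputStream();
    try (java.util.zip.GZIPOutputStream gzip = new java.util.zip.GZIPOutputStream(out)) {
      gzip.write(jsonBytes);
    } catch (java.io.IOException e) {
      return null; // compression failed, send uncompressed instead
    }
    return out.toByteArray();
  }

Checking the compressed size rather than the raw JSON length is what lets large scope lists stay under maxPayloadSize without splitting; the new test below exercises this with a 512-byte limit.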

dd-java-agent/agent-debugger/src/main/resources/third_party_libraries.json

Lines changed: 1 addition & 1 deletion
Large diffs are not rendered by default.

dd-java-agent/agent-debugger/src/test/java/com/datadog/debugger/sink/SymbolSinkTest.java

Lines changed: 26 additions & 0 deletions
@@ -14,6 +14,7 @@
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
+import okhttp3.MediaType;
 import org.junit.jupiter.api.Test;
 
 class SymbolSinkTest {
@@ -202,6 +203,31 @@ public void splitByClassScopesImpossible() {
     assertTrue(symbolUploaderMock.multiPartContents.isEmpty());
   }
 
+  @Test
+  public void maxCompressedAndSplit() {
+    SymbolUploaderMock symbolUploaderMock = new SymbolUploaderMock();
+    Config config = mock(Config.class);
+    when(config.getServiceName()).thenReturn("service1");
+    when(config.isSymbolDatabaseCompressed()).thenReturn(true);
+    SymbolSink symbolSink = new SymbolSink(config, symbolUploaderMock, 512);
+    final int NUM_JAR_SCOPES = 100;
+    for (int i = 0; i < NUM_JAR_SCOPES; i++) {
+      symbolSink.addScope(
+          Scope.builder(ScopeType.JAR, "jar" + i + ".jar", 0, 0)
+              .scopes(singletonList(Scope.builder(ScopeType.CLASS, "class" + i, 0, 0).build()))
+              .build());
+    }
+    symbolSink.flush();
+    assertEquals(4, symbolUploaderMock.multiPartContents.size());
+    for (int i = 0; i < 4; i += 2) {
+      BatchUploader.MultiPartContent eventContent = symbolUploaderMock.multiPartContents.get(i);
+      assertEquals("event", eventContent.getPartName());
+      BatchUploader.MultiPartContent symbolContent =
+          symbolUploaderMock.multiPartContents.get(i + 1);
+      assertEquals(MediaType.get("application/gzip"), symbolContent.getMediaType());
+    }
+  }
+
   private static String assertMultipartContent(SymbolUploaderMock symbolUploaderMock, int index) {
     BatchUploader.MultiPartContent eventContent = symbolUploaderMock.multiPartContents.get(index);
     assertEquals("event", eventContent.getPartName());

dd-java-agent/instrumentation/scala/build.gradle

Lines changed: 13 additions & 3 deletions
@@ -27,7 +27,17 @@ final testTasks = scalaVersions.collect { scalaLibrary ->
   def (major, minor) = version.split('_').collect(Integer.&valueOf)
   final javaConcatenation = major > 2 || minor > 11 // after 2.11 scala uses java.lang.StringBuilder to perform concatenation
 
-  final configuration = configurations.create("${version}Implementation")
+  final configuration = configurations.create("${version}Implementation") {
+    canBeConsumed = false
+    canBeResolved = false
+    canBeDeclared = true
+  }
+  final classPathConfiguration = configurations.create("${version}CompileClasspath") {
+    canBeConsumed = false
+    canBeResolved = true
+    canBeDeclared = false
+    extendsFrom(configuration)
+  }
 
   dependencies { handler ->
     handler.add(configuration.name, scalaLibrary)
@@ -40,7 +50,7 @@ final testTasks = scalaVersions.collect { scalaLibrary ->
   final customSourceSet = sourceSets.create("${version}") {
     scala {
       srcDirs = ['src/test/scala']
-      compileClasspath += configuration
+      compileClasspath += classPathConfiguration
     }
   }
 
@@ -49,7 +59,7 @@ final testTasks = scalaVersions.collect { scalaLibrary ->
       .filter { !it.toString().contains('scala-library') } // exclude default scala-library
      .minus(files(sourceSets.test.scala.classesDirectory)) // exclude default /build/classes/scala/test folder
      .plus(customSourceSet.output.classesDirs) // add /build/classes/scala/${version} folder
-      .plus(configuration) // add new scala-library configuration
+      .plus(classPathConfiguration) // add new scala-library configuration
    systemProperty('uses.java.concat', javaConcatenation)
    dependsOn(tasks.named("compile${version.capitalize()}Scala"))
    group = 'verification'
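
The effect of the configuration split above: "${version}Implementation" becomes a pure dependency bucket (declarable, but neither consumable nor resolvable), while the new "${version}CompileClasspath" is the resolvable view that extends it and is the one wired into the custom source set's compileClasspath and the test task's classpath. This mirrors how Gradle's built-in implementation/compileClasspath pair is arranged and avoids resolving a declaration-only configuration directly.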

dd-java-agent/instrumentation/spark-executor/build.gradle

Lines changed: 5 additions & 4 deletions
@@ -37,9 +37,10 @@ dependencies {
   baseTestImplementation group: 'org.apache.spark', name: "spark-core_2.12", version: "2.4.0"
   baseTestImplementation group: 'org.apache.spark', name: "spark-sql_2.12", version: "2.4.0"
 
-  latest212DepTestImplementation group: 'org.apache.spark', name: "spark-core_2.12", version: '3.+'
-  latest212DepTestImplementation group: 'org.apache.spark', name: "spark-sql_2.12", version: '3.+'
+  // FIXME: 3.6.0 seems missing from central
+  latest212DepTestImplementation group: 'org.apache.spark', name: "spark-core_2.12", version: '3.5.5'
+  latest212DepTestImplementation group: 'org.apache.spark', name: "spark-sql_2.12", version: '3.5.5'
 
-  latest213DepTestImplementation group: 'org.apache.spark', name: "spark-core_2.13", version: '3.+'
-  latest213DepTestImplementation group: 'org.apache.spark', name: "spark-sql_2.13", version: '3.+'
+  latest213DepTestImplementation group: 'org.apache.spark', name: "spark-core_2.13", version: '3.5.5'
+  latest213DepTestImplementation group: 'org.apache.spark', name: "spark-sql_2.13", version: '3.5.5'
 }

dd-java-agent/instrumentation/spark/spark_2.13/build.gradle

Lines changed: 4 additions & 3 deletions
@@ -52,9 +52,10 @@ dependencies {
   test_spark32Implementation group: 'org.apache.spark', name: "spark-yarn_$scalaVersion", version: "3.2.4"
 
   // FIXME: Currently not working on Spark 4.0.0 preview releases.
-  latestDepTestImplementation group: 'org.apache.spark', name: "spark-core_$scalaVersion", version: '3.+'
-  latestDepTestImplementation group: 'org.apache.spark', name: "spark-sql_$scalaVersion", version: '3.+'
-  latestDepTestImplementation group: 'org.apache.spark', name: "spark-yarn_$scalaVersion", version: '3.+'
+  // FIXME: 3.6.0 seems missing from central
+  latestDepTestImplementation group: 'org.apache.spark', name: "spark-core_$scalaVersion", version: '3.5.5'
+  latestDepTestImplementation group: 'org.apache.spark', name: "spark-sql_$scalaVersion", version: '3.5.5'
+  latestDepTestImplementation group: 'org.apache.spark', name: "spark-yarn_$scalaVersion", version: '3.5.5'
 }
 
 tasks.named("test").configure {
