diff --git a/core/src/main/scala/kafka/tools/StreamsResetter.java b/core/src/main/scala/kafka/tools/StreamsResetter.java
index d356681e11242..3570fa59a2ff2 100644
--- a/core/src/main/scala/kafka/tools/StreamsResetter.java
+++ b/core/src/main/scala/kafka/tools/StreamsResetter.java
@@ -38,7 +38,7 @@
 import org.apache.kafka.common.serialization.ByteArrayDeserializer;
 import org.apache.kafka.common.utils.Exit;
 import org.apache.kafka.common.utils.Utils;
-import scala.collection.JavaConverters;
+import scala.jdk.CollectionConverters;

 import java.io.IOException;
 import java.text.ParseException;
@@ -299,7 +299,7 @@ private void checkInvalidArgs(final OptionParser optionParser,
             optionParser,
             options,
             option,
-            JavaConverters.asScalaSetConverter(invalidOptions).asScala());
+            CollectionConverters.SetHasAsScala(invalidOptions).asScala());
     }

     private int maybeResetInputAndSeekToEndIntermediateTopicOffsets(final Map consumerConfig,
diff --git a/jmh-benchmarks/src/main/java/org/apache/kafka/jmh/acl/AclAuthorizerBenchmark.java b/jmh-benchmarks/src/main/java/org/apache/kafka/jmh/acl/AclAuthorizerBenchmark.java
index 060ff3a210d8b..8c197b7f0a4dd 100644
--- a/jmh-benchmarks/src/main/java/org/apache/kafka/jmh/acl/AclAuthorizerBenchmark.java
+++ b/jmh-benchmarks/src/main/java/org/apache/kafka/jmh/acl/AclAuthorizerBenchmark.java
@@ -48,8 +48,8 @@
 import org.openjdk.jmh.annotations.State;
 import org.openjdk.jmh.annotations.TearDown;
 import org.openjdk.jmh.annotations.Warmup;
-import scala.collection.JavaConverters;
 import scala.collection.immutable.TreeMap;
+import scala.jdk.CollectionConverters;

 import java.lang.reflect.Field;
 import java.net.InetAddress;
@@ -144,7 +144,7 @@ private TreeMap<ResourcePattern, VersionedAcls> prepareAclCache() {
         TreeMap<ResourcePattern, VersionedAcls> aclCache = new TreeMap<>(new AclAuthorizer.ResourceOrdering());
         for (Map.Entry<ResourcePattern, Set<AclEntry>> entry : aclEntries.entrySet()) {
             aclCache = aclCache.updated(entry.getKey(),
-                new VersionedAcls(JavaConverters.asScalaSetConverter(entry.getValue()).asScala().toSet(), 1));
+                new VersionedAcls(CollectionConverters.SetHasAsScala(entry.getValue()).asScala().toSet(), 1));
         }

         return aclCache;
diff --git a/jmh-benchmarks/src/main/java/org/apache/kafka/jmh/fetcher/ReplicaFetcherThreadBenchmark.java b/jmh-benchmarks/src/main/java/org/apache/kafka/jmh/fetcher/ReplicaFetcherThreadBenchmark.java
index 22c3d2cd41d66..cec4a053b701e 100644
--- a/jmh-benchmarks/src/main/java/org/apache/kafka/jmh/fetcher/ReplicaFetcherThreadBenchmark.java
+++ b/jmh-benchmarks/src/main/java/org/apache/kafka/jmh/fetcher/ReplicaFetcherThreadBenchmark.java
@@ -71,9 +71,9 @@
 import org.openjdk.jmh.annotations.Warmup;
 import scala.Option;
 import scala.collection.Iterator;
-import scala.collection.JavaConverters;
 import scala.compat.java8.OptionConverters;
 import scala.collection.Map;
+import scala.jdk.CollectionConverters;

 import java.io.File;
 import java.io.IOException;
@@ -119,8 +119,8 @@ public void setup() throws IOException {
         List<File> logDirs = Collections.singletonList(logDir);
         BrokerTopicStats brokerTopicStats = new BrokerTopicStats();
         LogDirFailureChannel logDirFailureChannel = Mockito.mock(LogDirFailureChannel.class);
-        logManager = new LogManager(JavaConverters.asScalaIteratorConverter(logDirs.iterator()).asScala().toSeq(),
-                JavaConverters.asScalaIteratorConverter(new ArrayList<File>().iterator()).asScala().toSeq(),
+        logManager = new LogManager(CollectionConverters.IteratorHasAsScala(logDirs.iterator()).asScala().toSeq(),
+                CollectionConverters.IteratorHasAsScala(new ArrayList<File>().iterator()).asScala().toSeq(),
                 new scala.collection.mutable.HashMap<>(),
                 logConfig,
                 new CleanerConfig(0, 0, 0, 0, 0, 0.0, 0, false, "MD5"),
diff --git a/jmh-benchmarks/src/main/java/org/apache/kafka/jmh/partition/PartitionMakeFollowerBenchmark.java b/jmh-benchmarks/src/main/java/org/apache/kafka/jmh/partition/PartitionMakeFollowerBenchmark.java
index b1b587c4cd735..5bffcadf52a6a 100644
--- a/jmh-benchmarks/src/main/java/org/apache/kafka/jmh/partition/PartitionMakeFollowerBenchmark.java
+++ b/jmh-benchmarks/src/main/java/org/apache/kafka/jmh/partition/PartitionMakeFollowerBenchmark.java
@@ -53,7 +53,7 @@
 import org.openjdk.jmh.annotations.TearDown;
 import org.openjdk.jmh.annotations.Warmup;
 import scala.Option;
-import scala.collection.JavaConverters;
+import scala.jdk.CollectionConverters;

 import java.io.File;
 import java.io.IOException;
@@ -96,8 +96,8 @@ public void setup() throws IOException {
         List<File> logDirs = Collections.singletonList(logDir);
         BrokerTopicStats brokerTopicStats = new BrokerTopicStats();
         LogDirFailureChannel logDirFailureChannel = Mockito.mock(LogDirFailureChannel.class);
-        logManager = new LogManager(JavaConverters.asScalaIteratorConverter(logDirs.iterator()).asScala().toSeq(),
-                JavaConverters.asScalaIteratorConverter(new ArrayList<File>().iterator()).asScala().toSeq(),
+        logManager = new LogManager(CollectionConverters.IteratorHasAsScala(logDirs.iterator()).asScala().toSeq(),
+                CollectionConverters.IteratorHasAsScala(new ArrayList<File>().iterator()).asScala().toSeq(),
                 new scala.collection.mutable.HashMap<>(),
                 logConfig,
                 new CleanerConfig(0, 0, 0, 0, 0, 0.0, 0, false, "MD5"),
diff --git a/jmh-benchmarks/src/main/java/org/apache/kafka/jmh/partition/UpdateFollowerFetchStateBenchmark.java b/jmh-benchmarks/src/main/java/org/apache/kafka/jmh/partition/UpdateFollowerFetchStateBenchmark.java
index 2253b08b4de41..7abdca5c79f8b 100644
--- a/jmh-benchmarks/src/main/java/org/apache/kafka/jmh/partition/UpdateFollowerFetchStateBenchmark.java
+++ b/jmh-benchmarks/src/main/java/org/apache/kafka/jmh/partition/UpdateFollowerFetchStateBenchmark.java
@@ -50,7 +50,7 @@
 import org.openjdk.jmh.annotations.TearDown;
 import org.openjdk.jmh.annotations.Warmup;
 import scala.Option;
-import scala.collection.JavaConverters;
+import scala.jdk.CollectionConverters;

 import java.io.File;
 import java.util.ArrayList;
@@ -81,8 +81,8 @@ public void setUp() {
         scheduler.startup();
         LogConfig logConfig = createLogConfig();
         List<File> logDirs = Collections.singletonList(logDir);
-        logManager = new LogManager(JavaConverters.asScalaIteratorConverter(logDirs.iterator()).asScala().toSeq(),
-                JavaConverters.asScalaIteratorConverter(new ArrayList<File>().iterator()).asScala().toSeq(),
+        logManager = new LogManager(CollectionConverters.IteratorHasAsScala(logDirs.iterator()).asScala().toSeq(),
+                CollectionConverters.IteratorHasAsScala(new ArrayList<File>().iterator()).asScala().toSeq(),
                 new scala.collection.mutable.HashMap<>(),
                 logConfig,
                 new CleanerConfig(0, 0, 0, 0, 0, 0.0, 0, false, "MD5"),
diff --git a/jmh-benchmarks/src/main/java/org/apache/kafka/jmh/server/CheckpointBench.java b/jmh-benchmarks/src/main/java/org/apache/kafka/jmh/server/CheckpointBench.java
index 7c9c6de6bf993..1d178566ea0b6 100644
--- a/jmh-benchmarks/src/main/java/org/apache/kafka/jmh/server/CheckpointBench.java
+++ b/jmh-benchmarks/src/main/java/org/apache/kafka/jmh/server/CheckpointBench.java
@@ -60,7 +60,7 @@
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.stream.Collectors;

-import scala.collection.JavaConverters;
+import scala.jdk.CollectionConverters;
 import scala.Option;

 @Warmup(iterations = 5)
@@ -105,8 +105,8 @@ public void setup() {
         this.time = new MockTime();
         this.failureChannel = new LogDirFailureChannel(brokerProperties.logDirs().size());
         final List<File> files =
-                JavaConverters.seqAsJavaList(brokerProperties.logDirs()).stream().map(File::new).collect(Collectors.toList());
-        this.logManager = TestUtils.createLogManager(JavaConverters.asScalaBuffer(files),
+                CollectionConverters.SeqHasAsJava(brokerProperties.logDirs()).asJava().stream().map(File::new).collect(Collectors.toList());
+        this.logManager = TestUtils.createLogManager(CollectionConverters.ListHasAsScala(files).asScala(),
                 LogConfig.apply(), CleanerConfig.apply(1, 4 * 1024 * 1024L, 0.9d,
                         1024 * 1024, 32 * 1024 * 1024, Double.MAX_VALUE,
                         15 * 1000, true, "MD5"), time);
@@ -167,7 +167,7 @@ public void tearDown() throws Exception {
         this.metrics.close();
         this.scheduler.shutdown();
         this.quotaManagers.shutdown();
-        for (File dir : JavaConverters.asJavaCollection(logManager.liveLogDirs())) {
+        for (File dir : CollectionConverters.SeqHasAsJava(logManager.liveLogDirs()).asJava()) {
             Utils.delete(dir);
         }
     }
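
For readers unfamiliar with the Scala 2.13 API, here is a minimal standalone sketch (not part of the patch) of the rewrite pattern applied throughout: the deprecated scala.collection.JavaConverters wrappers are replaced by the XxxHasAsScala / XxxHasAsJava factory methods exposed by scala.jdk.CollectionConverters, which can be called directly from Java. The class name and sample values below are made up for illustration; it assumes Scala 2.13+ is on the classpath.

// Illustrative sketch only (not part of the patch); hypothetical class and data.
import java.util.Arrays;
import java.util.List;

import scala.collection.immutable.Seq;
import scala.jdk.CollectionConverters;

public class ConvertersMigrationExample {
    public static void main(String[] args) {
        List<String> logDirs = Arrays.asList("/tmp/kafka-logs-0", "/tmp/kafka-logs-1");

        // Before (Scala 2.12 style, deprecated in 2.13):
        //     JavaConverters.asScalaBufferConverter(logDirs).asScala().toSeq()
        // After (Scala 2.13): extension-style wrappers, callable from Java as plain methods.
        Seq<String> scalaDirs = CollectionConverters.ListHasAsScala(logDirs).asScala().toSeq();

        // And back again: Scala Seq -> java.util.List.
        List<String> javaDirs = CollectionConverters.SeqHasAsJava(scalaDirs).asJava();

        System.out.println(scalaDirs.length() + " dirs round-tripped: " + javaDirs);
    }
}

Scala 2.13 also ships scala.jdk.javaapi.CollectionConverters with plain asScala/asJava methods aimed at Java callers; this patch keeps the extension-style calls, which stay closest to the original JavaConverters call sites.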