From 98f17fae5cb602437672c0d0248480433d6a4636 Mon Sep 17 00:00:00 2001 From: BlindSpot <127803250+blindspotbounty@users.noreply.github.com> Date: Tue, 28 Nov 2023 17:07:52 +0200 Subject: [PATCH 01/25] benchmark for consumer --- .../KafkaConsumerBenchmark.swift | 298 +++++++++++++++++- .../Utilities.swift | 128 ++++++++ .../ForTesting/RDKafkaClient+Topic.swift | 157 +++++++++ Sources/Kafka/ForTesting/TestMessages.swift | 104 ++++++ Sources/Kafka/KafkaConsumer.swift | 4 +- Sources/Kafka/RDKafka/RDKafkaClient.swift | 3 +- Tests/IntegrationTests/KafkaTests.swift | 53 +--- Tests/IntegrationTests/Utilities.swift | 158 ---------- 8 files changed, 696 insertions(+), 209 deletions(-) create mode 100644 Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/Utilities.swift create mode 100644 Sources/Kafka/ForTesting/RDKafkaClient+Topic.swift create mode 100644 Sources/Kafka/ForTesting/TestMessages.swift diff --git a/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift b/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift index d725cf4a..d7d41cb4 100644 --- a/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift +++ b/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift @@ -13,9 +13,18 @@ //===----------------------------------------------------------------------===// import Benchmark +import Crdkafka +import Dispatch +import struct Foundation.Date +import struct Foundation.UUID import Kafka +import Logging +import ServiceLifecycle let benchmarks = { + var uniqueTestTopic: String! + let messageCount: UInt = 1000 + Benchmark.defaultConfiguration = .init( metrics: [.wallClock, .cpuTotal, .allocatedResidentMemory, .contextSwitches, .throughput] + .arc, warmupIterations: 0, @@ -24,7 +33,292 @@ let benchmarks = { maxIterations: 100 ) - Benchmark.setup = {} + Benchmark.setup = { + uniqueTestTopic = try await prepareTopic(messagesCount: messageCount, partitions: 6) + } + + Benchmark.teardown = { + if let uniqueTestTopic { + try deleteTopic(uniqueTestTopic) + } + uniqueTestTopic = nil + } + + Benchmark("SwiftKafkaConsumer - basic consumer (messages: \(messageCount))") { benchmark in + let uniqueGroupID = UUID().uuidString + var consumerConfig = KafkaConsumerConfiguration( + consumptionStrategy: .group( + id: uniqueGroupID, + topics: [uniqueTestTopic] + ), + bootstrapBrokerAddresses: [brokerAddress] + ) + consumerConfig.autoOffsetReset = .beginning + consumerConfig.broker.addressFamily = .v4 + + let consumer = try KafkaConsumer( + configuration: consumerConfig, + logger: .perfLogger + ) + + let serviceGroupConfiguration2 = ServiceGroupConfiguration(services: [consumer], gracefulShutdownSignals: [.sigterm, .sigint], logger: .perfLogger) + let serviceGroup2 = ServiceGroup(configuration: serviceGroupConfiguration2) + + try await withThrowingTaskGroup(of: Void.self) { group in + benchLog("Start consuming") + defer { + benchLog("Finish consuming") + } + // Run Task + group.addTask { + try await serviceGroup2.run() + } + + // Second Consumer Task + group.addTask { + var ctr: UInt64 = 0 + var tmpCtr: UInt64 = 0 + let interval: UInt64 = Swift.max(UInt64(messageCount / 20), 1) + let totalStartDate = Date.timeIntervalSinceReferenceDate + var totalBytes: UInt64 = 0 + + try await benchmark.withMeasurement { + for try await record in consumer.messages { + ctr += 1 + totalBytes += UInt64(record.value.readableBytes) + + tmpCtr += 1 + if tmpCtr >= interval { + benchLog("read \(ctr * 100 / UInt64(messageCount))%") + tmpCtr = 0 + } + if ctr >= messageCount { + break + } + } + } + + let timeIntervalTotal = Date.timeIntervalSinceReferenceDate - totalStartDate + let avgRateMb = Double(totalBytes) / timeIntervalTotal / 1024 + benchLog("All read up to ctr: \(ctr), avgRate: (\(Int(avgRateMb))KB/s), timePassed: \(Int(timeIntervalTotal))sec") + } + + // Wait for second Consumer Task to complete + try await group.next() + // Shutdown the serviceGroup + await serviceGroup2.triggerGracefulShutdown() + } + } + + Benchmark("SwiftKafkaConsumer - with offset commit (messages: \(messageCount))") { benchmark in + let uniqueGroupID = UUID().uuidString + var consumerConfig = KafkaConsumerConfiguration( + consumptionStrategy: .group( + id: uniqueGroupID, + topics: [uniqueTestTopic] + ), + bootstrapBrokerAddresses: [brokerAddress] + ) + consumerConfig.autoOffsetReset = .beginning + consumerConfig.broker.addressFamily = .v4 + consumerConfig.isAutoCommitEnabled = false + + let consumer = try KafkaConsumer( + configuration: consumerConfig, + logger: .perfLogger + ) + + let serviceGroupConfiguration = ServiceGroupConfiguration(services: [consumer], gracefulShutdownSignals: [.sigterm, .sigint], logger: .perfLogger) + let serviceGroup = ServiceGroup(configuration: serviceGroupConfiguration) + + try await withThrowingTaskGroup(of: Void.self) { group in + benchLog("Start consuming") + defer { + benchLog("Finish consuming") + } + // Run Task + group.addTask { + try await serviceGroup.run() + } + + // Second Consumer Task + group.addTask { + var ctr: UInt64 = 0 + var tmpCtr: UInt64 = 0 + let interval: UInt64 = Swift.max(UInt64(messageCount / 20), 1) + let totalStartDate = Date.timeIntervalSinceReferenceDate + var totalBytes: UInt64 = 0 + + try await benchmark.withMeasurement { + for try await record in consumer.messages { + try await consumer.commit(record) + + ctr += 1 + totalBytes += UInt64(record.value.readableBytes) + + tmpCtr += 1 + if tmpCtr >= interval { + benchLog("read \(ctr * 100 / UInt64(messageCount))%") + tmpCtr = 0 + } + if ctr >= messageCount { + break + } + } + } + + let timeIntervalTotal = Date.timeIntervalSinceReferenceDate - totalStartDate + let avgRateMb = Double(totalBytes) / timeIntervalTotal / 1024 + benchLog("All read up to ctr: \(ctr), avgRate: (\(Int(avgRateMb))KB/s), timePassed: \(Int(timeIntervalTotal))sec") + } + + // Wait for second Consumer Task to complete + try await group.next() + // Shutdown the serviceGroup + await serviceGroup.triggerGracefulShutdown() + } + } + + Benchmark("librdkafka - basic consumer (messages: \(messageCount))") { benchmark in + let uniqueGroupID = UUID().uuidString + let rdKafkaConsumerConfig: [String: String] = [ + "group.id": uniqueGroupID, + "bootstrap.servers": "\(brokerAddress.host):\(brokerAddress.port)", + "broker.address.family": "v4", + "auto.offset.reset": "beginning", + ] + + let configPointer: OpaquePointer = rd_kafka_conf_new() + for (key, value) in rdKafkaConsumerConfig { + precondition(rd_kafka_conf_set(configPointer, key, value, nil, 0) == RD_KAFKA_CONF_OK) + } + + let kafkaHandle = rd_kafka_new(RD_KAFKA_CONSUMER, configPointer, nil, 0) + guard let kafkaHandle else { + preconditionFailure("Kafka handle was not created") + } + defer { + rd_kafka_destroy(kafkaHandle) + } + + rd_kafka_poll_set_consumer(kafkaHandle) + let subscriptionList = rd_kafka_topic_partition_list_new(1) + defer { + rd_kafka_topic_partition_list_destroy(subscriptionList) + } + rd_kafka_topic_partition_list_add( + subscriptionList, + uniqueTestTopic, + RD_KAFKA_PARTITION_UA + ) + rd_kafka_subscribe(kafkaHandle, subscriptionList) + rd_kafka_poll(kafkaHandle, 0) + + var ctr: UInt64 = 0 + var tmpCtr: UInt64 = 0 + + let interval: UInt64 = Swift.max(UInt64(messageCount / 20), 1) + let totalStartDate = Date.timeIntervalSinceReferenceDate + var totalBytes: UInt64 = 0 + + benchmark.withMeasurement { + while ctr < messageCount { + guard let record = rd_kafka_consumer_poll(kafkaHandle, 10) else { + continue + } + defer { + rd_kafka_message_destroy(record) + } + ctr += 1 + totalBytes += UInt64(record.pointee.len) + + tmpCtr += 1 + if tmpCtr >= interval { + benchLog("read \(ctr * 100 / UInt64(messageCount))%") + tmpCtr = 0 + } + } + } + + rd_kafka_consumer_close(kafkaHandle) + + let timeIntervalTotal = Date.timeIntervalSinceReferenceDate - totalStartDate + let avgRateMb = Double(totalBytes) / timeIntervalTotal / 1024 + benchLog("All read up to ctr: \(ctr), avgRate: (\(Int(avgRateMb))KB/s), timePassed: \(Int(timeIntervalTotal))sec") + } + + Benchmark("librdkafka - with offset commit (messages: \(messageCount))") { benchmark in + let uniqueGroupID = UUID().uuidString + let rdKafkaConsumerConfig: [String: String] = [ + "group.id": uniqueGroupID, + "bootstrap.servers": "\(brokerAddress.host):\(brokerAddress.port)", + "broker.address.family": "v4", + "auto.offset.reset": "beginning", + "enable.auto.commit": "false", + ] + + let configPointer: OpaquePointer = rd_kafka_conf_new() + for (key, value) in rdKafkaConsumerConfig { + precondition(rd_kafka_conf_set(configPointer, key, value, nil, 0) == RD_KAFKA_CONF_OK) + } + + let kafkaHandle = rd_kafka_new(RD_KAFKA_CONSUMER, configPointer, nil, 0) + guard let kafkaHandle else { + preconditionFailure("Kafka handle was not created") + } + defer { + rd_kafka_destroy(kafkaHandle) + } + + rd_kafka_poll_set_consumer(kafkaHandle) + let subscriptionList = rd_kafka_topic_partition_list_new(1) + defer { + rd_kafka_topic_partition_list_destroy(subscriptionList) + } + rd_kafka_topic_partition_list_add( + subscriptionList, + uniqueTestTopic, + RD_KAFKA_PARTITION_UA + ) + rd_kafka_subscribe(kafkaHandle, subscriptionList) + rd_kafka_poll(kafkaHandle, 0) + + var ctr: UInt64 = 0 + var tmpCtr: UInt64 = 0 + + let interval: UInt64 = Swift.max(UInt64(messageCount / 20), 1) + let totalStartDate = Date.timeIntervalSinceReferenceDate + var totalBytes: UInt64 = 0 + + benchmark.withMeasurement { + while ctr < messageCount { + guard let record = rd_kafka_consumer_poll(kafkaHandle, 10) else { + continue + } + defer { + rd_kafka_message_destroy(record) + } + guard record.pointee.err != RD_KAFKA_RESP_ERR__PARTITION_EOF else { + continue + } + let result = rd_kafka_commit_message(kafkaHandle, record, 0) + precondition(result == RD_KAFKA_RESP_ERR_NO_ERROR) + + ctr += 1 + totalBytes += UInt64(record.pointee.len) + + tmpCtr += 1 + if tmpCtr >= interval { + benchLog("read \(ctr * 100 / UInt64(messageCount))%") + tmpCtr = 0 + } + } + } + + rd_kafka_consumer_close(kafkaHandle) - Benchmark.teardown = {} + let timeIntervalTotal = Date.timeIntervalSinceReferenceDate - totalStartDate + let avgRateMb = Double(totalBytes) / timeIntervalTotal / 1024 + benchLog("All read up to ctr: \(ctr), avgRate: (\(Int(avgRateMb))KB/s), timePassed: \(Int(timeIntervalTotal))sec") + } } diff --git a/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/Utilities.swift b/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/Utilities.swift new file mode 100644 index 00000000..304dc1fb --- /dev/null +++ b/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/Utilities.swift @@ -0,0 +1,128 @@ +//===----------------------------------------------------------------------===// +// +// This source file is part of the swift-kafka-client open source project +// +// Copyright (c) 2022 Apple Inc. and the swift-kafka-client project authors +// Licensed under Apache License v2.0 +// +// See LICENSE.txt for license information +// See CONTRIBUTORS.txt for the list of swift-kafka-client project authors +// +// SPDX-License-Identifier: Apache-2.0 +// +//===----------------------------------------------------------------------===// + +import Benchmark +import class Foundation.ProcessInfo +import struct Foundation.UUID +import Kafka +@_spi(Internal) import Kafka +import Logging +import ServiceLifecycle + +let brokerAddress = KafkaConfiguration.BrokerAddress( + host: ProcessInfo.processInfo.environment["KAFKA_HOST"] ?? "localhost", + port: 9092 +) + +extension Logger { + static let perfLogger = { + var logger = Logger(label: "perf logger") + logger.logLevel = .critical + return logger + }() +} + +// For perf tests debugging +func benchLog(_ log: @autoclosure () -> Logger.Message) { + #if DEBUG + Logger.perfLogger.info(log()) + #endif +} + +func createTopic(partitions: Int32) throws -> String { + var basicConfig = KafkaConsumerConfiguration( + consumptionStrategy: .group(id: "no-group", topics: []), + bootstrapBrokerAddresses: [brokerAddress] + ) + basicConfig.broker.addressFamily = .v4 + + let client = try RDKafkaClient.makeClientForTopics(config: basicConfig, logger: .perfLogger) + return try client._createUniqueTopic(partitions: partitions, timeout: 10 * 1000) +} + +func deleteTopic(_ topic: String) throws { + var basicConfig = KafkaConsumerConfiguration( + consumptionStrategy: .group(id: "no-group", topics: []), + bootstrapBrokerAddresses: [brokerAddress] + ) + basicConfig.broker.addressFamily = .v4 + + let client = try RDKafkaClient.makeClientForTopics(config: basicConfig, logger: .perfLogger) + try client._deleteTopic(topic, timeout: 10 * 1000) +} + +func prepareTopic(messagesCount: UInt, partitions: Int32 = -1, logger: Logger = .perfLogger) async throws -> String { + let uniqueTestTopic = try createTopic(partitions: partitions) + + benchLog("Created topic \(uniqueTestTopic)") + + benchLog("Generating \(messagesCount) messages") + let testMessages = _createTestMessages(topic: uniqueTestTopic, count: messagesCount) + benchLog("Finish generating \(messagesCount) messages") + + var producerConfig = KafkaProducerConfiguration(bootstrapBrokerAddresses: [brokerAddress]) + producerConfig.broker.addressFamily = .v4 + + let (producer, acks) = try KafkaProducer.makeProducerWithEvents(configuration: producerConfig, logger: logger) + + let serviceGroupConfiguration = ServiceGroupConfiguration(services: [producer], gracefulShutdownSignals: [.sigterm, .sigint], logger: logger) + let serviceGroup = ServiceGroup(configuration: serviceGroupConfiguration) + + try await withThrowingTaskGroup(of: Void.self) { group in + benchLog("Start producing \(messagesCount) messages") + defer { + benchLog("Finish producing") + } + // Run Task + group.addTask { + try await serviceGroup.run() + } + + // Producer Task + group.addTask { + try await _sendAndAcknowledgeMessages( + producer: producer, + events: acks, + messages: testMessages, + skipConsistencyCheck: true + ) + } + + // Wait for Producer Task to complete + try await group.next() + await serviceGroup.triggerGracefulShutdown() + } + + return uniqueTestTopic +} + +extension Benchmark { + @discardableResult + func withMeasurement(_ body: () throws -> T) rethrows -> T { + self.startMeasurement() + defer { + self.stopMeasurement() + } + return try body() + } + + @discardableResult + func withMeasurement(_ body: () async throws -> T) async rethrows -> T { + self.startMeasurement() + defer { + self.stopMeasurement() + } + return try await body() + } +} diff --git a/Sources/Kafka/ForTesting/RDKafkaClient+Topic.swift b/Sources/Kafka/ForTesting/RDKafkaClient+Topic.swift new file mode 100644 index 00000000..58f2453d --- /dev/null +++ b/Sources/Kafka/ForTesting/RDKafkaClient+Topic.swift @@ -0,0 +1,157 @@ +//===----------------------------------------------------------------------===// +// +// This source file is part of the swift-kafka-client open source project +// +// Copyright (c) 2022 Apple Inc. and the swift-kafka-client project authors +// Licensed under Apache License v2.0 +// +// See LICENSE.txt for license information +// See CONTRIBUTORS.txt for the list of swift-kafka-client project authors +// +// SPDX-License-Identifier: Apache-2.0 +// +//===----------------------------------------------------------------------===// + +import Crdkafka +import struct Foundation.UUID +import Logging + +@_spi(Internal) +extension RDKafkaClient { + /// Create a topic with a unique name (`UUID`). + /// Blocks for a maximum of `timeout` milliseconds. + /// - Parameter partitions: Partitions in topic (default: -1 - default for broker) + /// - Parameter timeout: Timeout in milliseconds. + /// - Returns: Name of newly created topic. + /// - Throws: A ``KafkaError`` if the topic creation failed. + public func _createUniqueTopic(partitions: Int32 = -1, timeout: Int32) throws -> String { + let uniqueTopicName = UUID().uuidString + + let errorChars = UnsafeMutablePointer.allocate(capacity: RDKafkaClient.stringSize) + defer { errorChars.deallocate() } + + guard let newTopic = rd_kafka_NewTopic_new( + uniqueTopicName, + partitions, + -1, // use default replication_factor + errorChars, + RDKafkaClient.stringSize + ) else { + let errorString = String(cString: errorChars) + throw KafkaError.topicCreation(reason: errorString) + } + defer { rd_kafka_NewTopic_destroy(newTopic) } + + try self.withKafkaHandlePointer { kafkaHandle in + let resultQueue = rd_kafka_queue_new(kafkaHandle) + defer { rd_kafka_queue_destroy(resultQueue) } + + var newTopicsArray: [OpaquePointer?] = [newTopic] + rd_kafka_CreateTopics( + kafkaHandle, + &newTopicsArray, + 1, + nil, + resultQueue + ) + + guard let resultEvent = rd_kafka_queue_poll(resultQueue, timeout) else { + throw KafkaError.topicCreation(reason: "No CreateTopics result after 10s timeout") + } + defer { rd_kafka_event_destroy(resultEvent) } + + let resultCode = rd_kafka_event_error(resultEvent) + guard resultCode == RD_KAFKA_RESP_ERR_NO_ERROR else { + throw KafkaError.rdKafkaError(wrapping: resultCode) + } + + guard let topicsResultEvent = rd_kafka_event_CreateTopics_result(resultEvent) else { + throw KafkaError.topicCreation(reason: "Received event that is not of type rd_kafka_CreateTopics_result_t") + } + + var resultTopicCount = 0 + let topicResults = rd_kafka_CreateTopics_result_topics( + topicsResultEvent, + &resultTopicCount + ) + + guard resultTopicCount == 1, let topicResult = topicResults?[0] else { + throw KafkaError.topicCreation(reason: "Received less/more than one topic result") + } + + let topicResultError = rd_kafka_topic_result_error(topicResult) + guard topicResultError == RD_KAFKA_RESP_ERR_NO_ERROR else { + throw KafkaError.rdKafkaError(wrapping: topicResultError) + } + + let receivedTopicName = String(cString: rd_kafka_topic_result_name(topicResult)) + guard receivedTopicName == uniqueTopicName else { + throw KafkaError.topicCreation(reason: "Received topic result for topic with different name") + } + } + + return uniqueTopicName + } + + /// Delete a topic. + /// Blocks for a maximum of `timeout` milliseconds. + /// - Parameter topic: Topic to delete. + /// - Parameter timeout: Timeout in milliseconds. + /// - Throws: A ``KafkaError`` if the topic deletion failed. + public func _deleteTopic(_ topic: String, timeout: Int32) throws { + let deleteTopic = rd_kafka_DeleteTopic_new(topic) + defer { rd_kafka_DeleteTopic_destroy(deleteTopic) } + + try self.withKafkaHandlePointer { kafkaHandle in + let resultQueue = rd_kafka_queue_new(kafkaHandle) + defer { rd_kafka_queue_destroy(resultQueue) } + + var deleteTopicsArray: [OpaquePointer?] = [deleteTopic] + rd_kafka_DeleteTopics( + kafkaHandle, + &deleteTopicsArray, + 1, + nil, + resultQueue + ) + + guard let resultEvent = rd_kafka_queue_poll(resultQueue, timeout) else { + throw KafkaError.topicDeletion(reason: "No DeleteTopics result after 10s timeout") + } + defer { rd_kafka_event_destroy(resultEvent) } + + let resultCode = rd_kafka_event_error(resultEvent) + guard resultCode == RD_KAFKA_RESP_ERR_NO_ERROR else { + throw KafkaError.rdKafkaError(wrapping: resultCode) + } + + guard let topicsResultEvent = rd_kafka_event_DeleteTopics_result(resultEvent) else { + throw KafkaError.topicDeletion(reason: "Received event that is not of type rd_kafka_DeleteTopics_result_t") + } + + var resultTopicCount = 0 + let topicResults = rd_kafka_DeleteTopics_result_topics( + topicsResultEvent, + &resultTopicCount + ) + + guard resultTopicCount == 1, let topicResult = topicResults?[0] else { + throw KafkaError.topicDeletion(reason: "Received less/more than one topic result") + } + + let topicResultError = rd_kafka_topic_result_error(topicResult) + guard topicResultError == RD_KAFKA_RESP_ERR_NO_ERROR else { + throw KafkaError.rdKafkaError(wrapping: topicResultError) + } + + let receivedTopicName = String(cString: rd_kafka_topic_result_name(topicResult)) + guard receivedTopicName == topic else { + throw KafkaError.topicDeletion(reason: "Received topic result for topic with different name") + } + } + } + + public static func makeClientForTopics(config: KafkaConsumerConfiguration, logger: Logger) throws -> RDKafkaClient { + return try Self.makeClient(type: .consumer, configDictionary: config.dictionary, events: [], logger: logger) + } +} diff --git a/Sources/Kafka/ForTesting/TestMessages.swift b/Sources/Kafka/ForTesting/TestMessages.swift new file mode 100644 index 00000000..f9df6224 --- /dev/null +++ b/Sources/Kafka/ForTesting/TestMessages.swift @@ -0,0 +1,104 @@ +//===----------------------------------------------------------------------===// +// +// This source file is part of the swift-kafka-client open source project +// +// Copyright (c) 2022 Apple Inc. and the swift-kafka-client project authors +// Licensed under Apache License v2.0 +// +// See LICENSE.txt for license information +// See CONTRIBUTORS.txt for the list of swift-kafka-client project authors +// +// SPDX-License-Identifier: Apache-2.0 +// +//===----------------------------------------------------------------------===// +import struct Foundation.Date +import NIOCore + +@_spi(Internal) +public enum _TestMessagesError: Error { + case deliveryReportsIdsIncorrect + case deliveryReportsNotAllMessagesAcknoledged + case deliveryReportsIncorrect +} + +@_spi(Internal) +public func _createTestMessages( + topic: String, + headers: [KafkaHeader] = [], + count: UInt +) -> [KafkaProducerMessage] { + return Array(0..], + skipConsistencyCheck: Bool = false +) async throws { + var messageIDs = Set() + messageIDs.reserveCapacity(messages.count) + + for message in messages { + while true { + do { + messageIDs.insert(try producer.send(message)) + break + } catch let error as KafkaError where error.description.contains("Queue full") { + // That means we have to flush queue immediately but there is no interface for that + // producer.flush() + } + } + } + + var receivedDeliveryReports = Set() + receivedDeliveryReports.reserveCapacity(messages.count) + + for await event in events { + switch event { + case .deliveryReports(let deliveryReports): + for deliveryReport in deliveryReports { + receivedDeliveryReports.insert(deliveryReport) + } + default: + break // Ignore any other events + } + + if receivedDeliveryReports.count >= messages.count { + break + } + } + + guard Set(receivedDeliveryReports.map(\.id)) == messageIDs else { + throw _TestMessagesError.deliveryReportsIdsIncorrect + } + + let acknowledgedMessages: [KafkaAcknowledgedMessage] = receivedDeliveryReports.compactMap { + guard case .acknowledged(let receivedMessage) = $0.status else { + return nil + } + return receivedMessage + } + + guard messages.count == acknowledgedMessages.count else { + throw _TestMessagesError.deliveryReportsNotAllMessagesAcknoledged + } + if skipConsistencyCheck { + return + } + for message in messages { + guard acknowledgedMessages.contains(where: { $0.topic == message.topic }), + acknowledgedMessages.contains(where: { $0.key == ByteBuffer(string: message.key!) }), + acknowledgedMessages.contains(where: { $0.value == ByteBuffer(string: message.value) }) else { + throw _TestMessagesError.deliveryReportsIncorrect + } + } +} diff --git a/Sources/Kafka/KafkaConsumer.swift b/Sources/Kafka/KafkaConsumer.swift index 52a8d49d..feffc47b 100644 --- a/Sources/Kafka/KafkaConsumer.swift +++ b/Sources/Kafka/KafkaConsumer.swift @@ -843,8 +843,10 @@ extension KafkaConsumer { switch consumerState { case .running(let source): self.state = .running(client: client, messagePollLoopState: .waitingForMessages(source: source)) - case .suspended, .waitingForMessages, .finished: + case .suspended, .waitingForMessages: fatalError("\(#function) should not be invoked in state \(self.state)") + case .finished: + break // ok, skip action } } diff --git a/Sources/Kafka/RDKafka/RDKafkaClient.swift b/Sources/Kafka/RDKafka/RDKafkaClient.swift index 014a3e8c..512bf56a 100644 --- a/Sources/Kafka/RDKafka/RDKafkaClient.swift +++ b/Sources/Kafka/RDKafka/RDKafkaClient.swift @@ -19,7 +19,8 @@ import Logging /// Base class for ``KafkaProducer`` and ``KafkaConsumer``, /// which is used to handle the connection to the Kafka ecosystem. -final class RDKafkaClient: Sendable { +@_spi(Internal) +public final class RDKafkaClient: Sendable { // Default size for Strings returned from C API static let stringSize = 1024 diff --git a/Tests/IntegrationTests/KafkaTests.swift b/Tests/IntegrationTests/KafkaTests.swift index e6cf82e5..525b9461 100644 --- a/Tests/IntegrationTests/KafkaTests.swift +++ b/Tests/IntegrationTests/KafkaTests.swift @@ -14,6 +14,7 @@ import struct Foundation.UUID @testable import Kafka +@_spi(Internal) import Kafka import NIOCore import ServiceLifecycle import XCTest @@ -79,7 +80,9 @@ final class KafkaTests: XCTestCase { events: [], logger: .kafkaTest ) - try client._deleteTopic(self.uniqueTestTopic, timeout: 10 * 1000) + if let uniqueTestTopic = self.uniqueTestTopic { + try client._deleteTopic(uniqueTestTopic, timeout: 10 * 1000) + } self.bootstrapBrokerAddress = nil self.producerConfig = nil @@ -606,14 +609,7 @@ final class KafkaTests: XCTestCase { headers: [KafkaHeader] = [], count: UInt ) -> [KafkaProducerMessage] { - return Array(0..] ) async throws { - var messageIDs = Set() - - for message in messages { - messageIDs.insert(try producer.send(message)) - } - - var receivedDeliveryReports = Set() - - for await event in events { - switch event { - case .deliveryReports(let deliveryReports): - for deliveryReport in deliveryReports { - receivedDeliveryReports.insert(deliveryReport) - } - default: - break // Ignore any other events - } - - if receivedDeliveryReports.count >= messages.count { - break - } - } - - XCTAssertEqual(Set(receivedDeliveryReports.map(\.id)), messageIDs) - - let acknowledgedMessages: [KafkaAcknowledgedMessage] = receivedDeliveryReports.compactMap { - guard case .acknowledged(let receivedMessage) = $0.status else { - return nil - } - return receivedMessage - } - - XCTAssertEqual(messages.count, acknowledgedMessages.count) - for message in messages { - XCTAssertTrue(acknowledgedMessages.contains(where: { $0.topic == message.topic })) - XCTAssertTrue(acknowledgedMessages.contains(where: { $0.key == ByteBuffer(string: message.key!) })) - XCTAssertTrue(acknowledgedMessages.contains(where: { $0.value == ByteBuffer(string: message.value) })) - } + return try await _sendAndAcknowledgeMessages(producer: producer, events: events, messages: messages) } } diff --git a/Tests/IntegrationTests/Utilities.swift b/Tests/IntegrationTests/Utilities.swift index db86c0a0..94dbf374 100644 --- a/Tests/IntegrationTests/Utilities.swift +++ b/Tests/IntegrationTests/Utilities.swift @@ -12,9 +12,7 @@ // //===----------------------------------------------------------------------===// -import Crdkafka import struct Foundation.UUID -@testable import Kafka import Logging extension Logger { @@ -24,159 +22,3 @@ extension Logger { return logger } } - -extension RDKafkaClient { -// func createUniqueTopic(timeout: Int32 = 10000) async throws -> String { -// try await withCheckedThrowingContinuation { continuation in -// do { -// let uniqueTopic = try self._createUniqueTopic(timeout: timeout) -// continuation.resume(returning: uniqueTopic) -// } catch { -// continuation.resume(throwing: error) -// } -// } -// } - - /// Create a topic with a unique name (`UUID`). - /// Blocks for a maximum of `timeout` milliseconds. - /// - Parameter timeout: Timeout in milliseconds. - /// - Returns: Name of newly created topic. - /// - Throws: A ``KafkaError`` if the topic creation failed. - func _createUniqueTopic(timeout: Int32) throws -> String { - let uniqueTopicName = UUID().uuidString - - let errorChars = UnsafeMutablePointer.allocate(capacity: RDKafkaClient.stringSize) - defer { errorChars.deallocate() } - - guard let newTopic = rd_kafka_NewTopic_new( - uniqueTopicName, - -1, // use default num_partitions - -1, // use default replication_factor - errorChars, - RDKafkaClient.stringSize - ) else { - let errorString = String(cString: errorChars) - throw KafkaError.topicCreation(reason: errorString) - } - defer { rd_kafka_NewTopic_destroy(newTopic) } - - try self.withKafkaHandlePointer { kafkaHandle in - let resultQueue = rd_kafka_queue_new(kafkaHandle) - defer { rd_kafka_queue_destroy(resultQueue) } - - var newTopicsArray: [OpaquePointer?] = [newTopic] - rd_kafka_CreateTopics( - kafkaHandle, - &newTopicsArray, - 1, - nil, - resultQueue - ) - - guard let resultEvent = rd_kafka_queue_poll(resultQueue, timeout) else { - throw KafkaError.topicCreation(reason: "No CreateTopics result after 10s timeout") - } - defer { rd_kafka_event_destroy(resultEvent) } - - let resultCode = rd_kafka_event_error(resultEvent) - guard resultCode == RD_KAFKA_RESP_ERR_NO_ERROR else { - throw KafkaError.rdKafkaError(wrapping: resultCode) - } - - guard let topicsResultEvent = rd_kafka_event_CreateTopics_result(resultEvent) else { - throw KafkaError.topicCreation(reason: "Received event that is not of type rd_kafka_CreateTopics_result_t") - } - - var resultTopicCount = 0 - let topicResults = rd_kafka_CreateTopics_result_topics( - topicsResultEvent, - &resultTopicCount - ) - - guard resultTopicCount == 1, let topicResult = topicResults?[0] else { - throw KafkaError.topicCreation(reason: "Received less/more than one topic result") - } - - let topicResultError = rd_kafka_topic_result_error(topicResult) - guard topicResultError == RD_KAFKA_RESP_ERR_NO_ERROR else { - throw KafkaError.rdKafkaError(wrapping: topicResultError) - } - - let receivedTopicName = String(cString: rd_kafka_topic_result_name(topicResult)) - guard receivedTopicName == uniqueTopicName else { - throw KafkaError.topicCreation(reason: "Received topic result for topic with different name") - } - } - - return uniqueTopicName - } - -// func deleteTopic(_ topic: String, timeout: Int32 = 10000) async throws { -// try await withCheckedThrowingContinuation { continuation in -// do { -// try self._deleteTopic(topic, timeout: timeout) -// continuation.resume() -// } catch { -// continuation.resume(throwing: error) -// } -// } -// } - - /// Delete a topic. - /// Blocks for a maximum of `timeout` milliseconds. - /// - Parameter topic: Topic to delete. - /// - Parameter timeout: Timeout in milliseconds. - /// - Throws: A ``KafkaError`` if the topic deletion failed. - func _deleteTopic(_ topic: String, timeout: Int32) throws { - let deleteTopic = rd_kafka_DeleteTopic_new(topic) - defer { rd_kafka_DeleteTopic_destroy(deleteTopic) } - - try self.withKafkaHandlePointer { kafkaHandle in - let resultQueue = rd_kafka_queue_new(kafkaHandle) - defer { rd_kafka_queue_destroy(resultQueue) } - - var deleteTopicsArray: [OpaquePointer?] = [deleteTopic] - rd_kafka_DeleteTopics( - kafkaHandle, - &deleteTopicsArray, - 1, - nil, - resultQueue - ) - - guard let resultEvent = rd_kafka_queue_poll(resultQueue, timeout) else { - throw KafkaError.topicDeletion(reason: "No DeleteTopics result after 10s timeout") - } - defer { rd_kafka_event_destroy(resultEvent) } - - let resultCode = rd_kafka_event_error(resultEvent) - guard resultCode == RD_KAFKA_RESP_ERR_NO_ERROR else { - throw KafkaError.rdKafkaError(wrapping: resultCode) - } - - guard let topicsResultEvent = rd_kafka_event_DeleteTopics_result(resultEvent) else { - throw KafkaError.topicDeletion(reason: "Received event that is not of type rd_kafka_DeleteTopics_result_t") - } - - var resultTopicCount = 0 - let topicResults = rd_kafka_DeleteTopics_result_topics( - topicsResultEvent, - &resultTopicCount - ) - - guard resultTopicCount == 1, let topicResult = topicResults?[0] else { - throw KafkaError.topicDeletion(reason: "Received less/more than one topic result") - } - - let topicResultError = rd_kafka_topic_result_error(topicResult) - guard topicResultError == RD_KAFKA_RESP_ERR_NO_ERROR else { - throw KafkaError.rdKafkaError(wrapping: topicResultError) - } - - let receivedTopicName = String(cString: rd_kafka_topic_result_name(topicResult)) - guard receivedTopicName == topic else { - throw KafkaError.topicDeletion(reason: "Received topic result for topic with different name") - } - } - } -} From 57a349ef67161c14f7ac5e7b172d3011f957a575 Mon Sep 17 00:00:00 2001 From: BlindSpot <127803250+blindspotbounty@users.noreply.github.com> Date: Wed, 29 Nov 2023 10:33:25 +0200 Subject: [PATCH 02/25] attempty to speedup benchmarks --- .../SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift b/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift index d7d41cb4..e4b86093 100644 --- a/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift +++ b/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift @@ -55,6 +55,8 @@ let benchmarks = { ) consumerConfig.autoOffsetReset = .beginning consumerConfig.broker.addressFamily = .v4 + // We must specify it at least 10 otherwise CI will timeout + consumerConfig.pollInterval = .milliseconds(10) let consumer = try KafkaConsumer( configuration: consumerConfig, @@ -122,6 +124,8 @@ let benchmarks = { consumerConfig.autoOffsetReset = .beginning consumerConfig.broker.addressFamily = .v4 consumerConfig.isAutoCommitEnabled = false + // We must specify it at least 10 otherwise CI will timeout + consumerConfig.pollInterval = .milliseconds(10) let consumer = try KafkaConsumer( configuration: consumerConfig, From 0123cafd9337daa1cf452666cd2c5160987e6e45 Mon Sep 17 00:00:00 2001 From: BlindSpot <127803250+blindspotbounty@users.noreply.github.com> Date: Wed, 29 Nov 2023 10:41:31 +0200 Subject: [PATCH 03/25] check CI works for one test --- .../KafkaConsumerBenchmark.swift | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift b/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift index e4b86093..05fe05ef 100644 --- a/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift +++ b/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift @@ -63,8 +63,8 @@ let benchmarks = { logger: .perfLogger ) - let serviceGroupConfiguration2 = ServiceGroupConfiguration(services: [consumer], gracefulShutdownSignals: [.sigterm, .sigint], logger: .perfLogger) - let serviceGroup2 = ServiceGroup(configuration: serviceGroupConfiguration2) + let serviceGroupConfiguration = ServiceGroupConfiguration(services: [consumer], gracefulShutdownSignals: [.sigterm, .sigint], logger: .perfLogger) + let serviceGroup = ServiceGroup(configuration: serviceGroupConfiguration) try await withThrowingTaskGroup(of: Void.self) { group in benchLog("Start consuming") @@ -73,7 +73,7 @@ let benchmarks = { } // Run Task group.addTask { - try await serviceGroup2.run() + try await serviceGroup.run() } // Second Consumer Task @@ -108,10 +108,10 @@ let benchmarks = { // Wait for second Consumer Task to complete try await group.next() // Shutdown the serviceGroup - await serviceGroup2.triggerGracefulShutdown() + await serviceGroup.triggerGracefulShutdown() } } - +/* Benchmark("SwiftKafkaConsumer - with offset commit (messages: \(messageCount))") { benchmark in let uniqueGroupID = UUID().uuidString var consumerConfig = KafkaConsumerConfiguration( @@ -325,4 +325,5 @@ let benchmarks = { let avgRateMb = Double(totalBytes) / timeIntervalTotal / 1024 benchLog("All read up to ctr: \(ctr), avgRate: (\(Int(avgRateMb))KB/s), timePassed: \(Int(timeIntervalTotal))sec") } + */ } From 61b7aa573150650db2b0371cb2ed8d7aa7970977 Mon Sep 17 00:00:00 2001 From: BlindSpot <127803250+blindspotbounty@users.noreply.github.com> Date: Wed, 29 Nov 2023 13:16:19 +0200 Subject: [PATCH 04/25] enable one more test --- .../KafkaConsumerBenchmark.swift | 255 +++++++++--------- 1 file changed, 128 insertions(+), 127 deletions(-) diff --git a/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift b/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift index 05fe05ef..68ed88da 100644 --- a/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift +++ b/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift @@ -111,7 +111,7 @@ let benchmarks = { await serviceGroup.triggerGracefulShutdown() } } -/* + Benchmark("SwiftKafkaConsumer - with offset commit (messages: \(messageCount))") { benchmark in let uniqueGroupID = UUID().uuidString var consumerConfig = KafkaConsumerConfiguration( @@ -183,147 +183,148 @@ let benchmarks = { } } - Benchmark("librdkafka - basic consumer (messages: \(messageCount))") { benchmark in - let uniqueGroupID = UUID().uuidString - let rdKafkaConsumerConfig: [String: String] = [ - "group.id": uniqueGroupID, - "bootstrap.servers": "\(brokerAddress.host):\(brokerAddress.port)", - "broker.address.family": "v4", - "auto.offset.reset": "beginning", - ] - - let configPointer: OpaquePointer = rd_kafka_conf_new() - for (key, value) in rdKafkaConsumerConfig { - precondition(rd_kafka_conf_set(configPointer, key, value, nil, 0) == RD_KAFKA_CONF_OK) - } - - let kafkaHandle = rd_kafka_new(RD_KAFKA_CONSUMER, configPointer, nil, 0) - guard let kafkaHandle else { - preconditionFailure("Kafka handle was not created") - } - defer { - rd_kafka_destroy(kafkaHandle) - } - - rd_kafka_poll_set_consumer(kafkaHandle) - let subscriptionList = rd_kafka_topic_partition_list_new(1) - defer { - rd_kafka_topic_partition_list_destroy(subscriptionList) - } - rd_kafka_topic_partition_list_add( - subscriptionList, - uniqueTestTopic, - RD_KAFKA_PARTITION_UA - ) - rd_kafka_subscribe(kafkaHandle, subscriptionList) - rd_kafka_poll(kafkaHandle, 0) - - var ctr: UInt64 = 0 - var tmpCtr: UInt64 = 0 + /* + Benchmark("librdkafka - basic consumer (messages: \(messageCount))") { benchmark in + let uniqueGroupID = UUID().uuidString + let rdKafkaConsumerConfig: [String: String] = [ + "group.id": uniqueGroupID, + "bootstrap.servers": "\(brokerAddress.host):\(brokerAddress.port)", + "broker.address.family": "v4", + "auto.offset.reset": "beginning", + ] + + let configPointer: OpaquePointer = rd_kafka_conf_new() + for (key, value) in rdKafkaConsumerConfig { + precondition(rd_kafka_conf_set(configPointer, key, value, nil, 0) == RD_KAFKA_CONF_OK) + } - let interval: UInt64 = Swift.max(UInt64(messageCount / 20), 1) - let totalStartDate = Date.timeIntervalSinceReferenceDate - var totalBytes: UInt64 = 0 + let kafkaHandle = rd_kafka_new(RD_KAFKA_CONSUMER, configPointer, nil, 0) + guard let kafkaHandle else { + preconditionFailure("Kafka handle was not created") + } + defer { + rd_kafka_destroy(kafkaHandle) + } - benchmark.withMeasurement { - while ctr < messageCount { - guard let record = rd_kafka_consumer_poll(kafkaHandle, 10) else { - continue - } - defer { - rd_kafka_message_destroy(record) - } - ctr += 1 - totalBytes += UInt64(record.pointee.len) + rd_kafka_poll_set_consumer(kafkaHandle) + let subscriptionList = rd_kafka_topic_partition_list_new(1) + defer { + rd_kafka_topic_partition_list_destroy(subscriptionList) + } + rd_kafka_topic_partition_list_add( + subscriptionList, + uniqueTestTopic, + RD_KAFKA_PARTITION_UA + ) + rd_kafka_subscribe(kafkaHandle, subscriptionList) + rd_kafka_poll(kafkaHandle, 0) + + var ctr: UInt64 = 0 + var tmpCtr: UInt64 = 0 + + let interval: UInt64 = Swift.max(UInt64(messageCount / 20), 1) + let totalStartDate = Date.timeIntervalSinceReferenceDate + var totalBytes: UInt64 = 0 + + benchmark.withMeasurement { + while ctr < messageCount { + guard let record = rd_kafka_consumer_poll(kafkaHandle, 10) else { + continue + } + defer { + rd_kafka_message_destroy(record) + } + ctr += 1 + totalBytes += UInt64(record.pointee.len) - tmpCtr += 1 - if tmpCtr >= interval { - benchLog("read \(ctr * 100 / UInt64(messageCount))%") - tmpCtr = 0 + tmpCtr += 1 + if tmpCtr >= interval { + benchLog("read \(ctr * 100 / UInt64(messageCount))%") + tmpCtr = 0 + } } } - } - - rd_kafka_consumer_close(kafkaHandle) - let timeIntervalTotal = Date.timeIntervalSinceReferenceDate - totalStartDate - let avgRateMb = Double(totalBytes) / timeIntervalTotal / 1024 - benchLog("All read up to ctr: \(ctr), avgRate: (\(Int(avgRateMb))KB/s), timePassed: \(Int(timeIntervalTotal))sec") - } + rd_kafka_consumer_close(kafkaHandle) - Benchmark("librdkafka - with offset commit (messages: \(messageCount))") { benchmark in - let uniqueGroupID = UUID().uuidString - let rdKafkaConsumerConfig: [String: String] = [ - "group.id": uniqueGroupID, - "bootstrap.servers": "\(brokerAddress.host):\(brokerAddress.port)", - "broker.address.family": "v4", - "auto.offset.reset": "beginning", - "enable.auto.commit": "false", - ] - - let configPointer: OpaquePointer = rd_kafka_conf_new() - for (key, value) in rdKafkaConsumerConfig { - precondition(rd_kafka_conf_set(configPointer, key, value, nil, 0) == RD_KAFKA_CONF_OK) - } - - let kafkaHandle = rd_kafka_new(RD_KAFKA_CONSUMER, configPointer, nil, 0) - guard let kafkaHandle else { - preconditionFailure("Kafka handle was not created") - } - defer { - rd_kafka_destroy(kafkaHandle) - } - - rd_kafka_poll_set_consumer(kafkaHandle) - let subscriptionList = rd_kafka_topic_partition_list_new(1) - defer { - rd_kafka_topic_partition_list_destroy(subscriptionList) + let timeIntervalTotal = Date.timeIntervalSinceReferenceDate - totalStartDate + let avgRateMb = Double(totalBytes) / timeIntervalTotal / 1024 + benchLog("All read up to ctr: \(ctr), avgRate: (\(Int(avgRateMb))KB/s), timePassed: \(Int(timeIntervalTotal))sec") } - rd_kafka_topic_partition_list_add( - subscriptionList, - uniqueTestTopic, - RD_KAFKA_PARTITION_UA - ) - rd_kafka_subscribe(kafkaHandle, subscriptionList) - rd_kafka_poll(kafkaHandle, 0) - var ctr: UInt64 = 0 - var tmpCtr: UInt64 = 0 + Benchmark("librdkafka - with offset commit (messages: \(messageCount))") { benchmark in + let uniqueGroupID = UUID().uuidString + let rdKafkaConsumerConfig: [String: String] = [ + "group.id": uniqueGroupID, + "bootstrap.servers": "\(brokerAddress.host):\(brokerAddress.port)", + "broker.address.family": "v4", + "auto.offset.reset": "beginning", + "enable.auto.commit": "false", + ] + + let configPointer: OpaquePointer = rd_kafka_conf_new() + for (key, value) in rdKafkaConsumerConfig { + precondition(rd_kafka_conf_set(configPointer, key, value, nil, 0) == RD_KAFKA_CONF_OK) + } - let interval: UInt64 = Swift.max(UInt64(messageCount / 20), 1) - let totalStartDate = Date.timeIntervalSinceReferenceDate - var totalBytes: UInt64 = 0 + let kafkaHandle = rd_kafka_new(RD_KAFKA_CONSUMER, configPointer, nil, 0) + guard let kafkaHandle else { + preconditionFailure("Kafka handle was not created") + } + defer { + rd_kafka_destroy(kafkaHandle) + } - benchmark.withMeasurement { - while ctr < messageCount { - guard let record = rd_kafka_consumer_poll(kafkaHandle, 10) else { - continue - } - defer { - rd_kafka_message_destroy(record) - } - guard record.pointee.err != RD_KAFKA_RESP_ERR__PARTITION_EOF else { - continue - } - let result = rd_kafka_commit_message(kafkaHandle, record, 0) - precondition(result == RD_KAFKA_RESP_ERR_NO_ERROR) + rd_kafka_poll_set_consumer(kafkaHandle) + let subscriptionList = rd_kafka_topic_partition_list_new(1) + defer { + rd_kafka_topic_partition_list_destroy(subscriptionList) + } + rd_kafka_topic_partition_list_add( + subscriptionList, + uniqueTestTopic, + RD_KAFKA_PARTITION_UA + ) + rd_kafka_subscribe(kafkaHandle, subscriptionList) + rd_kafka_poll(kafkaHandle, 0) + + var ctr: UInt64 = 0 + var tmpCtr: UInt64 = 0 + + let interval: UInt64 = Swift.max(UInt64(messageCount / 20), 1) + let totalStartDate = Date.timeIntervalSinceReferenceDate + var totalBytes: UInt64 = 0 + + benchmark.withMeasurement { + while ctr < messageCount { + guard let record = rd_kafka_consumer_poll(kafkaHandle, 10) else { + continue + } + defer { + rd_kafka_message_destroy(record) + } + guard record.pointee.err != RD_KAFKA_RESP_ERR__PARTITION_EOF else { + continue + } + let result = rd_kafka_commit_message(kafkaHandle, record, 0) + precondition(result == RD_KAFKA_RESP_ERR_NO_ERROR) - ctr += 1 - totalBytes += UInt64(record.pointee.len) + ctr += 1 + totalBytes += UInt64(record.pointee.len) - tmpCtr += 1 - if tmpCtr >= interval { - benchLog("read \(ctr * 100 / UInt64(messageCount))%") - tmpCtr = 0 + tmpCtr += 1 + if tmpCtr >= interval { + benchLog("read \(ctr * 100 / UInt64(messageCount))%") + tmpCtr = 0 + } } } - } - rd_kafka_consumer_close(kafkaHandle) + rd_kafka_consumer_close(kafkaHandle) - let timeIntervalTotal = Date.timeIntervalSinceReferenceDate - totalStartDate - let avgRateMb = Double(totalBytes) / timeIntervalTotal / 1024 - benchLog("All read up to ctr: \(ctr), avgRate: (\(Int(avgRateMb))KB/s), timePassed: \(Int(timeIntervalTotal))sec") - } - */ + let timeIntervalTotal = Date.timeIntervalSinceReferenceDate - totalStartDate + let avgRateMb = Double(totalBytes) / timeIntervalTotal / 1024 + benchLog("All read up to ctr: \(ctr), avgRate: (\(Int(avgRateMb))KB/s), timePassed: \(Int(timeIntervalTotal))sec") + } + */ } From 376b30cb8cca623239eaecdfb20be789f44207f1 Mon Sep 17 00:00:00 2001 From: BlindSpot <127803250+blindspotbounty@users.noreply.github.com> Date: Wed, 29 Nov 2023 18:20:44 +0200 Subject: [PATCH 05/25] try to lower poll interval --- .../SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift b/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift index 68ed88da..03a6b961 100644 --- a/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift +++ b/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift @@ -56,7 +56,7 @@ let benchmarks = { consumerConfig.autoOffsetReset = .beginning consumerConfig.broker.addressFamily = .v4 // We must specify it at least 10 otherwise CI will timeout - consumerConfig.pollInterval = .milliseconds(10) + consumerConfig.pollInterval = .milliseconds(1) let consumer = try KafkaConsumer( configuration: consumerConfig, @@ -125,7 +125,7 @@ let benchmarks = { consumerConfig.broker.addressFamily = .v4 consumerConfig.isAutoCommitEnabled = false // We must specify it at least 10 otherwise CI will timeout - consumerConfig.pollInterval = .milliseconds(10) + consumerConfig.pollInterval = .milliseconds(1) let consumer = try KafkaConsumer( configuration: consumerConfig, From 01a94481595c5c5b7f5e0e2a2b6c82d4a214bd16 Mon Sep 17 00:00:00 2001 From: BlindSpot <127803250+blindspotbounty@users.noreply.github.com> Date: Thu, 30 Nov 2023 11:33:05 +0200 Subject: [PATCH 06/25] adjust max duration of test --- .../SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift b/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift index 03a6b961..390548cb 100644 --- a/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift +++ b/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift @@ -29,7 +29,7 @@ let benchmarks = { metrics: [.wallClock, .cpuTotal, .allocatedResidentMemory, .contextSwitches, .throughput] + .arc, warmupIterations: 0, scalingFactor: .one, - maxDuration: .seconds(5), + maxDuration: .seconds(1), maxIterations: 100 ) From 274e4d987899fcb4c00ba8ace3fee7eff406e776 Mon Sep 17 00:00:00 2001 From: BlindSpot <127803250+blindspotbounty@users.noreply.github.com> Date: Fri, 1 Dec 2023 11:37:47 +0200 Subject: [PATCH 07/25] remain only manual commit test --- .../KafkaConsumerBenchmark.swift | 138 +++++++++--------- 1 file changed, 69 insertions(+), 69 deletions(-) diff --git a/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift b/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift index 390548cb..b13d478e 100644 --- a/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift +++ b/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift @@ -43,75 +43,75 @@ let benchmarks = { } uniqueTestTopic = nil } - - Benchmark("SwiftKafkaConsumer - basic consumer (messages: \(messageCount))") { benchmark in - let uniqueGroupID = UUID().uuidString - var consumerConfig = KafkaConsumerConfiguration( - consumptionStrategy: .group( - id: uniqueGroupID, - topics: [uniqueTestTopic] - ), - bootstrapBrokerAddresses: [brokerAddress] - ) - consumerConfig.autoOffsetReset = .beginning - consumerConfig.broker.addressFamily = .v4 - // We must specify it at least 10 otherwise CI will timeout - consumerConfig.pollInterval = .milliseconds(1) - - let consumer = try KafkaConsumer( - configuration: consumerConfig, - logger: .perfLogger - ) - - let serviceGroupConfiguration = ServiceGroupConfiguration(services: [consumer], gracefulShutdownSignals: [.sigterm, .sigint], logger: .perfLogger) - let serviceGroup = ServiceGroup(configuration: serviceGroupConfiguration) - - try await withThrowingTaskGroup(of: Void.self) { group in - benchLog("Start consuming") - defer { - benchLog("Finish consuming") - } - // Run Task - group.addTask { - try await serviceGroup.run() - } - - // Second Consumer Task - group.addTask { - var ctr: UInt64 = 0 - var tmpCtr: UInt64 = 0 - let interval: UInt64 = Swift.max(UInt64(messageCount / 20), 1) - let totalStartDate = Date.timeIntervalSinceReferenceDate - var totalBytes: UInt64 = 0 - - try await benchmark.withMeasurement { - for try await record in consumer.messages { - ctr += 1 - totalBytes += UInt64(record.value.readableBytes) - - tmpCtr += 1 - if tmpCtr >= interval { - benchLog("read \(ctr * 100 / UInt64(messageCount))%") - tmpCtr = 0 - } - if ctr >= messageCount { - break - } - } - } - - let timeIntervalTotal = Date.timeIntervalSinceReferenceDate - totalStartDate - let avgRateMb = Double(totalBytes) / timeIntervalTotal / 1024 - benchLog("All read up to ctr: \(ctr), avgRate: (\(Int(avgRateMb))KB/s), timePassed: \(Int(timeIntervalTotal))sec") - } - - // Wait for second Consumer Task to complete - try await group.next() - // Shutdown the serviceGroup - await serviceGroup.triggerGracefulShutdown() - } - } - + /* + Benchmark("SwiftKafkaConsumer - basic consumer (messages: \(messageCount))") { benchmark in + let uniqueGroupID = UUID().uuidString + var consumerConfig = KafkaConsumerConfiguration( + consumptionStrategy: .group( + id: uniqueGroupID, + topics: [uniqueTestTopic] + ), + bootstrapBrokerAddresses: [brokerAddress] + ) + consumerConfig.autoOffsetReset = .beginning + consumerConfig.broker.addressFamily = .v4 + // We must specify it at least 10 otherwise CI will timeout + consumerConfig.pollInterval = .milliseconds(1) + + let consumer = try KafkaConsumer( + configuration: consumerConfig, + logger: .perfLogger + ) + + let serviceGroupConfiguration = ServiceGroupConfiguration(services: [consumer], gracefulShutdownSignals: [.sigterm, .sigint], logger: .perfLogger) + let serviceGroup = ServiceGroup(configuration: serviceGroupConfiguration) + + try await withThrowingTaskGroup(of: Void.self) { group in + benchLog("Start consuming") + defer { + benchLog("Finish consuming") + } + // Run Task + group.addTask { + try await serviceGroup.run() + } + + // Second Consumer Task + group.addTask { + var ctr: UInt64 = 0 + var tmpCtr: UInt64 = 0 + let interval: UInt64 = Swift.max(UInt64(messageCount / 20), 1) + let totalStartDate = Date.timeIntervalSinceReferenceDate + var totalBytes: UInt64 = 0 + + try await benchmark.withMeasurement { + for try await record in consumer.messages { + ctr += 1 + totalBytes += UInt64(record.value.readableBytes) + + tmpCtr += 1 + if tmpCtr >= interval { + benchLog("read \(ctr * 100 / UInt64(messageCount))%") + tmpCtr = 0 + } + if ctr >= messageCount { + break + } + } + } + + let timeIntervalTotal = Date.timeIntervalSinceReferenceDate - totalStartDate + let avgRateMb = Double(totalBytes) / timeIntervalTotal / 1024 + benchLog("All read up to ctr: \(ctr), avgRate: (\(Int(avgRateMb))KB/s), timePassed: \(Int(timeIntervalTotal))sec") + } + + // Wait for second Consumer Task to complete + try await group.next() + // Shutdown the serviceGroup + await serviceGroup.triggerGracefulShutdown() + } + } + */ Benchmark("SwiftKafkaConsumer - with offset commit (messages: \(messageCount))") { benchmark in let uniqueGroupID = UUID().uuidString var consumerConfig = KafkaConsumerConfiguration( From 1811752714ebb5797b05360b8ea39fb5a6e11fdf Mon Sep 17 00:00:00 2001 From: BlindSpot <127803250+blindspotbounty@users.noreply.github.com> Date: Fri, 1 Dec 2023 12:10:45 +0200 Subject: [PATCH 08/25] check if commit is the reason for test delays --- .../SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift b/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift index b13d478e..6d32c0e7 100644 --- a/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift +++ b/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift @@ -155,7 +155,7 @@ let benchmarks = { try await benchmark.withMeasurement { for try await record in consumer.messages { - try await consumer.commit(record) + try consumer.scheduleCommit(record) ctr += 1 totalBytes += UInt64(record.value.readableBytes) From 632b6b71b52c1e0f1b6c7bb5aaa5b483cf4af2dc Mon Sep 17 00:00:00 2001 From: BlindSpot <127803250+blindspotbounty@users.noreply.github.com> Date: Fri, 1 Dec 2023 12:25:03 +0200 Subject: [PATCH 09/25] try all with schedule commit --- .../KafkaConsumerBenchmark.swift | 390 +++++++++--------- 1 file changed, 194 insertions(+), 196 deletions(-) diff --git a/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift b/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift index 6d32c0e7..db658d67 100644 --- a/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift +++ b/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift @@ -43,76 +43,8 @@ let benchmarks = { } uniqueTestTopic = nil } - /* - Benchmark("SwiftKafkaConsumer - basic consumer (messages: \(messageCount))") { benchmark in - let uniqueGroupID = UUID().uuidString - var consumerConfig = KafkaConsumerConfiguration( - consumptionStrategy: .group( - id: uniqueGroupID, - topics: [uniqueTestTopic] - ), - bootstrapBrokerAddresses: [brokerAddress] - ) - consumerConfig.autoOffsetReset = .beginning - consumerConfig.broker.addressFamily = .v4 - // We must specify it at least 10 otherwise CI will timeout - consumerConfig.pollInterval = .milliseconds(1) - - let consumer = try KafkaConsumer( - configuration: consumerConfig, - logger: .perfLogger - ) - - let serviceGroupConfiguration = ServiceGroupConfiguration(services: [consumer], gracefulShutdownSignals: [.sigterm, .sigint], logger: .perfLogger) - let serviceGroup = ServiceGroup(configuration: serviceGroupConfiguration) - - try await withThrowingTaskGroup(of: Void.self) { group in - benchLog("Start consuming") - defer { - benchLog("Finish consuming") - } - // Run Task - group.addTask { - try await serviceGroup.run() - } - - // Second Consumer Task - group.addTask { - var ctr: UInt64 = 0 - var tmpCtr: UInt64 = 0 - let interval: UInt64 = Swift.max(UInt64(messageCount / 20), 1) - let totalStartDate = Date.timeIntervalSinceReferenceDate - var totalBytes: UInt64 = 0 - - try await benchmark.withMeasurement { - for try await record in consumer.messages { - ctr += 1 - totalBytes += UInt64(record.value.readableBytes) - - tmpCtr += 1 - if tmpCtr >= interval { - benchLog("read \(ctr * 100 / UInt64(messageCount))%") - tmpCtr = 0 - } - if ctr >= messageCount { - break - } - } - } - - let timeIntervalTotal = Date.timeIntervalSinceReferenceDate - totalStartDate - let avgRateMb = Double(totalBytes) / timeIntervalTotal / 1024 - benchLog("All read up to ctr: \(ctr), avgRate: (\(Int(avgRateMb))KB/s), timePassed: \(Int(timeIntervalTotal))sec") - } - - // Wait for second Consumer Task to complete - try await group.next() - // Shutdown the serviceGroup - await serviceGroup.triggerGracefulShutdown() - } - } - */ - Benchmark("SwiftKafkaConsumer - with offset commit (messages: \(messageCount))") { benchmark in + + Benchmark("SwiftKafkaConsumer - basic consumer (messages: \(messageCount))") { benchmark in let uniqueGroupID = UUID().uuidString var consumerConfig = KafkaConsumerConfiguration( consumptionStrategy: .group( @@ -123,7 +55,6 @@ let benchmarks = { ) consumerConfig.autoOffsetReset = .beginning consumerConfig.broker.addressFamily = .v4 - consumerConfig.isAutoCommitEnabled = false // We must specify it at least 10 otherwise CI will timeout consumerConfig.pollInterval = .milliseconds(1) @@ -155,8 +86,6 @@ let benchmarks = { try await benchmark.withMeasurement { for try await record in consumer.messages { - try consumer.scheduleCommit(record) - ctr += 1 totalBytes += UInt64(record.value.readableBytes) @@ -183,148 +112,217 @@ let benchmarks = { } } - /* - Benchmark("librdkafka - basic consumer (messages: \(messageCount))") { benchmark in - let uniqueGroupID = UUID().uuidString - let rdKafkaConsumerConfig: [String: String] = [ - "group.id": uniqueGroupID, - "bootstrap.servers": "\(brokerAddress.host):\(brokerAddress.port)", - "broker.address.family": "v4", - "auto.offset.reset": "beginning", - ] - - let configPointer: OpaquePointer = rd_kafka_conf_new() - for (key, value) in rdKafkaConsumerConfig { - precondition(rd_kafka_conf_set(configPointer, key, value, nil, 0) == RD_KAFKA_CONF_OK) - } + Benchmark("SwiftKafkaConsumer - with offset commit (messages: \(messageCount))") { benchmark in + let uniqueGroupID = UUID().uuidString + var consumerConfig = KafkaConsumerConfiguration( + consumptionStrategy: .group( + id: uniqueGroupID, + topics: [uniqueTestTopic] + ), + bootstrapBrokerAddresses: [brokerAddress] + ) + consumerConfig.autoOffsetReset = .beginning + consumerConfig.broker.addressFamily = .v4 + consumerConfig.isAutoCommitEnabled = false + // We must specify it at least 10 otherwise CI will timeout + consumerConfig.pollInterval = .milliseconds(1) - let kafkaHandle = rd_kafka_new(RD_KAFKA_CONSUMER, configPointer, nil, 0) - guard let kafkaHandle else { - preconditionFailure("Kafka handle was not created") - } - defer { - rd_kafka_destroy(kafkaHandle) - } + let consumer = try KafkaConsumer( + configuration: consumerConfig, + logger: .perfLogger + ) - rd_kafka_poll_set_consumer(kafkaHandle) - let subscriptionList = rd_kafka_topic_partition_list_new(1) + let serviceGroupConfiguration = ServiceGroupConfiguration(services: [consumer], gracefulShutdownSignals: [.sigterm, .sigint], logger: .perfLogger) + let serviceGroup = ServiceGroup(configuration: serviceGroupConfiguration) + + try await withThrowingTaskGroup(of: Void.self) { group in + benchLog("Start consuming") defer { - rd_kafka_topic_partition_list_destroy(subscriptionList) + benchLog("Finish consuming") } - rd_kafka_topic_partition_list_add( - subscriptionList, - uniqueTestTopic, - RD_KAFKA_PARTITION_UA - ) - rd_kafka_subscribe(kafkaHandle, subscriptionList) - rd_kafka_poll(kafkaHandle, 0) - - var ctr: UInt64 = 0 - var tmpCtr: UInt64 = 0 - - let interval: UInt64 = Swift.max(UInt64(messageCount / 20), 1) - let totalStartDate = Date.timeIntervalSinceReferenceDate - var totalBytes: UInt64 = 0 - - benchmark.withMeasurement { - while ctr < messageCount { - guard let record = rd_kafka_consumer_poll(kafkaHandle, 10) else { - continue - } - defer { - rd_kafka_message_destroy(record) - } - ctr += 1 - totalBytes += UInt64(record.pointee.len) + // Run Task + group.addTask { + try await serviceGroup.run() + } + + // Second Consumer Task + group.addTask { + var ctr: UInt64 = 0 + var tmpCtr: UInt64 = 0 + let interval: UInt64 = Swift.max(UInt64(messageCount / 20), 1) + let totalStartDate = Date.timeIntervalSinceReferenceDate + var totalBytes: UInt64 = 0 + + try await benchmark.withMeasurement { + for try await record in consumer.messages { + try consumer.scheduleCommit(record) + + ctr += 1 + totalBytes += UInt64(record.value.readableBytes) - tmpCtr += 1 - if tmpCtr >= interval { - benchLog("read \(ctr * 100 / UInt64(messageCount))%") - tmpCtr = 0 + tmpCtr += 1 + if tmpCtr >= interval { + benchLog("read \(ctr * 100 / UInt64(messageCount))%") + tmpCtr = 0 + } + if ctr >= messageCount { + break + } } } + + let timeIntervalTotal = Date.timeIntervalSinceReferenceDate - totalStartDate + let avgRateMb = Double(totalBytes) / timeIntervalTotal / 1024 + benchLog("All read up to ctr: \(ctr), avgRate: (\(Int(avgRateMb))KB/s), timePassed: \(Int(timeIntervalTotal))sec") } - rd_kafka_consumer_close(kafkaHandle) + // Wait for second Consumer Task to complete + try await group.next() + // Shutdown the serviceGroup + await serviceGroup.triggerGracefulShutdown() + } + } - let timeIntervalTotal = Date.timeIntervalSinceReferenceDate - totalStartDate - let avgRateMb = Double(totalBytes) / timeIntervalTotal / 1024 - benchLog("All read up to ctr: \(ctr), avgRate: (\(Int(avgRateMb))KB/s), timePassed: \(Int(timeIntervalTotal))sec") + Benchmark("librdkafka - basic consumer (messages: \(messageCount))") { benchmark in + let uniqueGroupID = UUID().uuidString + let rdKafkaConsumerConfig: [String: String] = [ + "group.id": uniqueGroupID, + "bootstrap.servers": "\(brokerAddress.host):\(brokerAddress.port)", + "broker.address.family": "v4", + "auto.offset.reset": "beginning", + ] + + let configPointer: OpaquePointer = rd_kafka_conf_new() + for (key, value) in rdKafkaConsumerConfig { + precondition(rd_kafka_conf_set(configPointer, key, value, nil, 0) == RD_KAFKA_CONF_OK) } - Benchmark("librdkafka - with offset commit (messages: \(messageCount))") { benchmark in - let uniqueGroupID = UUID().uuidString - let rdKafkaConsumerConfig: [String: String] = [ - "group.id": uniqueGroupID, - "bootstrap.servers": "\(brokerAddress.host):\(brokerAddress.port)", - "broker.address.family": "v4", - "auto.offset.reset": "beginning", - "enable.auto.commit": "false", - ] - - let configPointer: OpaquePointer = rd_kafka_conf_new() - for (key, value) in rdKafkaConsumerConfig { - precondition(rd_kafka_conf_set(configPointer, key, value, nil, 0) == RD_KAFKA_CONF_OK) - } + let kafkaHandle = rd_kafka_new(RD_KAFKA_CONSUMER, configPointer, nil, 0) + guard let kafkaHandle else { + preconditionFailure("Kafka handle was not created") + } + defer { + rd_kafka_destroy(kafkaHandle) + } - let kafkaHandle = rd_kafka_new(RD_KAFKA_CONSUMER, configPointer, nil, 0) - guard let kafkaHandle else { - preconditionFailure("Kafka handle was not created") - } - defer { - rd_kafka_destroy(kafkaHandle) - } + rd_kafka_poll_set_consumer(kafkaHandle) + let subscriptionList = rd_kafka_topic_partition_list_new(1) + defer { + rd_kafka_topic_partition_list_destroy(subscriptionList) + } + rd_kafka_topic_partition_list_add( + subscriptionList, + uniqueTestTopic, + RD_KAFKA_PARTITION_UA + ) + rd_kafka_subscribe(kafkaHandle, subscriptionList) + rd_kafka_poll(kafkaHandle, 0) - rd_kafka_poll_set_consumer(kafkaHandle) - let subscriptionList = rd_kafka_topic_partition_list_new(1) - defer { - rd_kafka_topic_partition_list_destroy(subscriptionList) - } - rd_kafka_topic_partition_list_add( - subscriptionList, - uniqueTestTopic, - RD_KAFKA_PARTITION_UA - ) - rd_kafka_subscribe(kafkaHandle, subscriptionList) - rd_kafka_poll(kafkaHandle, 0) - - var ctr: UInt64 = 0 - var tmpCtr: UInt64 = 0 - - let interval: UInt64 = Swift.max(UInt64(messageCount / 20), 1) - let totalStartDate = Date.timeIntervalSinceReferenceDate - var totalBytes: UInt64 = 0 - - benchmark.withMeasurement { - while ctr < messageCount { - guard let record = rd_kafka_consumer_poll(kafkaHandle, 10) else { - continue - } - defer { - rd_kafka_message_destroy(record) - } - guard record.pointee.err != RD_KAFKA_RESP_ERR__PARTITION_EOF else { - continue - } - let result = rd_kafka_commit_message(kafkaHandle, record, 0) - precondition(result == RD_KAFKA_RESP_ERR_NO_ERROR) + var ctr: UInt64 = 0 + var tmpCtr: UInt64 = 0 - ctr += 1 - totalBytes += UInt64(record.pointee.len) + let interval: UInt64 = Swift.max(UInt64(messageCount / 20), 1) + let totalStartDate = Date.timeIntervalSinceReferenceDate + var totalBytes: UInt64 = 0 - tmpCtr += 1 - if tmpCtr >= interval { - benchLog("read \(ctr * 100 / UInt64(messageCount))%") - tmpCtr = 0 - } + benchmark.withMeasurement { + while ctr < messageCount { + guard let record = rd_kafka_consumer_poll(kafkaHandle, 10) else { + continue + } + defer { + rd_kafka_message_destroy(record) + } + ctr += 1 + totalBytes += UInt64(record.pointee.len) + + tmpCtr += 1 + if tmpCtr >= interval { + benchLog("read \(ctr * 100 / UInt64(messageCount))%") + tmpCtr = 0 } } + } + + rd_kafka_consumer_close(kafkaHandle) + + let timeIntervalTotal = Date.timeIntervalSinceReferenceDate - totalStartDate + let avgRateMb = Double(totalBytes) / timeIntervalTotal / 1024 + benchLog("All read up to ctr: \(ctr), avgRate: (\(Int(avgRateMb))KB/s), timePassed: \(Int(timeIntervalTotal))sec") + } + + Benchmark("librdkafka - with offset commit (messages: \(messageCount))") { benchmark in + let uniqueGroupID = UUID().uuidString + let rdKafkaConsumerConfig: [String: String] = [ + "group.id": uniqueGroupID, + "bootstrap.servers": "\(brokerAddress.host):\(brokerAddress.port)", + "broker.address.family": "v4", + "auto.offset.reset": "beginning", + "enable.auto.commit": "false", + ] + + let configPointer: OpaquePointer = rd_kafka_conf_new() + for (key, value) in rdKafkaConsumerConfig { + precondition(rd_kafka_conf_set(configPointer, key, value, nil, 0) == RD_KAFKA_CONF_OK) + } + + let kafkaHandle = rd_kafka_new(RD_KAFKA_CONSUMER, configPointer, nil, 0) + guard let kafkaHandle else { + preconditionFailure("Kafka handle was not created") + } + defer { + rd_kafka_destroy(kafkaHandle) + } - rd_kafka_consumer_close(kafkaHandle) + rd_kafka_poll_set_consumer(kafkaHandle) + let subscriptionList = rd_kafka_topic_partition_list_new(1) + defer { + rd_kafka_topic_partition_list_destroy(subscriptionList) + } + rd_kafka_topic_partition_list_add( + subscriptionList, + uniqueTestTopic, + RD_KAFKA_PARTITION_UA + ) + rd_kafka_subscribe(kafkaHandle, subscriptionList) + rd_kafka_poll(kafkaHandle, 0) + + var ctr: UInt64 = 0 + var tmpCtr: UInt64 = 0 + + let interval: UInt64 = Swift.max(UInt64(messageCount / 20), 1) + let totalStartDate = Date.timeIntervalSinceReferenceDate + var totalBytes: UInt64 = 0 + + benchmark.withMeasurement { + while ctr < messageCount { + guard let record = rd_kafka_consumer_poll(kafkaHandle, 10) else { + continue + } + defer { + rd_kafka_message_destroy(record) + } + guard record.pointee.err != RD_KAFKA_RESP_ERR__PARTITION_EOF else { + continue + } + let result = rd_kafka_commit_message(kafkaHandle, record, 0) + precondition(result == RD_KAFKA_RESP_ERR_NO_ERROR) + + ctr += 1 + totalBytes += UInt64(record.pointee.len) - let timeIntervalTotal = Date.timeIntervalSinceReferenceDate - totalStartDate - let avgRateMb = Double(totalBytes) / timeIntervalTotal / 1024 - benchLog("All read up to ctr: \(ctr), avgRate: (\(Int(avgRateMb))KB/s), timePassed: \(Int(timeIntervalTotal))sec") + tmpCtr += 1 + if tmpCtr >= interval { + benchLog("read \(ctr * 100 / UInt64(messageCount))%") + tmpCtr = 0 + } + } } - */ + + rd_kafka_consumer_close(kafkaHandle) + + let timeIntervalTotal = Date.timeIntervalSinceReferenceDate - totalStartDate + let avgRateMb = Double(totalBytes) / timeIntervalTotal / 1024 + benchLog("All read up to ctr: \(ctr), avgRate: (\(Int(avgRateMb))KB/s), timePassed: \(Int(timeIntervalTotal))sec") + } } From a617092d332da14e3c16da49278ffbe33fd94cb5 Mon Sep 17 00:00:00 2001 From: BlindSpot <127803250+blindspotbounty@users.noreply.github.com> Date: Fri, 1 Dec 2023 12:38:51 +0200 Subject: [PATCH 10/25] revert max test time to 5 seconds --- .../SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift b/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift index db658d67..53c3e239 100644 --- a/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift +++ b/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift @@ -29,7 +29,7 @@ let benchmarks = { metrics: [.wallClock, .cpuTotal, .allocatedResidentMemory, .contextSwitches, .throughput] + .arc, warmupIterations: 0, scalingFactor: .one, - maxDuration: .seconds(1), + maxDuration: .seconds(5), maxIterations: 100 ) From 7de0be798d1a7817c7fe57bda037cca80a94ce5e Mon Sep 17 00:00:00 2001 From: BlindSpot <127803250+blindspotbounty@users.noreply.github.com> Date: Thu, 15 Feb 2024 12:39:56 +0200 Subject: [PATCH 11/25] dockerfiles --- docker/docker-compose.2204.57.yaml | 1 + docker/docker-compose.2204.58.yaml | 1 + docker/docker-compose.2204.59.yaml | 1 + docker/docker-compose.2204.main.yaml | 1 + docker/docker-compose.yaml | 1 + 5 files changed, 5 insertions(+) diff --git a/docker/docker-compose.2204.57.yaml b/docker/docker-compose.2204.57.yaml index af7cda0c..a465a610 100644 --- a/docker/docker-compose.2204.57.yaml +++ b/docker/docker-compose.2204.57.yaml @@ -15,6 +15,7 @@ services: test: image: swift-kafka-client:22.04-5.7 environment: + - SWIFT_VERSION=5.7 - WARN_AS_ERROR_ARG=-Xswiftc -warnings-as-errors - STRICT_CONCURRENCY_ARG=-Xswiftc -strict-concurrency=complete # - SANITIZER_ARG=--sanitize=thread # TSan broken still diff --git a/docker/docker-compose.2204.58.yaml b/docker/docker-compose.2204.58.yaml index 521c6ac9..47b02679 100644 --- a/docker/docker-compose.2204.58.yaml +++ b/docker/docker-compose.2204.58.yaml @@ -15,6 +15,7 @@ services: test: image: swift-kafka-client:22.04-5.8 environment: + - SWIFT_VERSION=5.8 - WARN_AS_ERROR_ARG=-Xswiftc -warnings-as-errors - IMPORT_CHECK_ARG=--explicit-target-dependency-import-check error - STRICT_CONCURRENCY_ARG=-Xswiftc -strict-concurrency=complete diff --git a/docker/docker-compose.2204.59.yaml b/docker/docker-compose.2204.59.yaml index e0a562d7..8d9cf29d 100644 --- a/docker/docker-compose.2204.59.yaml +++ b/docker/docker-compose.2204.59.yaml @@ -15,6 +15,7 @@ services: test: image: swift-kafka-client:22.04-5.9 environment: + - SWIFT_VERSION=5.9 - WARN_AS_ERROR_ARG=-Xswiftc -warnings-as-errors - IMPORT_CHECK_ARG=--explicit-target-dependency-import-check error - STRICT_CONCURRENCY_ARG=-Xswiftc -strict-concurrency=complete diff --git a/docker/docker-compose.2204.main.yaml b/docker/docker-compose.2204.main.yaml index b4e098cf..acac1a54 100644 --- a/docker/docker-compose.2204.main.yaml +++ b/docker/docker-compose.2204.main.yaml @@ -11,6 +11,7 @@ services: test: image: swift-kafka-client:22.04-main environment: + - SWIFT_VERSION=main - WARN_AS_ERROR_ARG=-Xswiftc -warnings-as-errors - IMPORT_CHECK_ARG=--explicit-target-dependency-import-check error - STRICT_CONCURRENCY_ARG=-Xswiftc -strict-concurrency=complete diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml index d8789f7c..888922c8 100644 --- a/docker/docker-compose.yaml +++ b/docker/docker-compose.yaml @@ -50,6 +50,7 @@ services: <<: *common depends_on: [kafka, runtime-setup] environment: + SWIFT_VERSION: 5.7 KAFKA_HOST: kafka command: > /bin/bash -xcl " From 6a2a1b78d0bf71041ba3408ad870ab666743cbfe Mon Sep 17 00:00:00 2001 From: BlindSpot <127803250+blindspotbounty@users.noreply.github.com> Date: Mon, 26 Feb 2024 19:01:53 +0300 Subject: [PATCH 12/25] test set threasholds --- .../KafkaConsumerBenchmark.swift | 22 ++++++++++++++----- .../BenchmarkCheckWrapper.swift | 8 +++++++ ...umer_basic_consumer_messages_1000.p90.json | 10 +++++++++ ..._with_offset_commit_messages_1000.p90.json | 10 +++++++++ ...afka_basic_consumer_messages_1000.p90.json | 10 +++++++++ ..._with_offset_commit_messages_1000.p90.json | 10 +++++++++ ...umer_basic_consumer_messages_1000.p90.json | 10 +++++++++ ..._with_offset_commit_messages_1000.p90.json | 10 +++++++++ ...afka_basic_consumer_messages_1000.p90.json | 10 +++++++++ ..._with_offset_commit_messages_1000.p90.json | 10 +++++++++ ...umer_basic_consumer_messages_1000.p90.json | 10 +++++++++ ..._with_offset_commit_messages_1000.p90.json | 10 +++++++++ ...afka_basic_consumer_messages_1000.p90.json | 10 +++++++++ ..._with_offset_commit_messages_1000.p90.json | 10 +++++++++ ...umer_basic_consumer_messages_1000.p90.json | 10 +++++++++ ..._with_offset_commit_messages_1000.p90.json | 10 +++++++++ ...afka_basic_consumer_messages_1000.p90.json | 10 +++++++++ ..._with_offset_commit_messages_1000.p90.json | 10 +++++++++ ...umer_basic_consumer_messages_1000.p90.json | 10 +++++++++ ..._with_offset_commit_messages_1000.p90.json | 10 +++++++++ ...afka_basic_consumer_messages_1000.p90.json | 10 +++++++++ ..._with_offset_commit_messages_1000.p90.json | 10 +++++++++ 22 files changed, 225 insertions(+), 5 deletions(-) create mode 100644 Benchmarks/Plugins/BenchmarkCheckWrapper/BenchmarkCheckWrapper.swift create mode 100644 Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json diff --git a/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift b/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift index 53c3e239..66da8ce6 100644 --- a/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift +++ b/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift @@ -30,7 +30,19 @@ let benchmarks = { warmupIterations: 0, scalingFactor: .one, maxDuration: .seconds(5), - maxIterations: 100 + maxIterations: 100, + thresholds: [ + // Thresholds are wild guess mostly. Have to adjust with time. + .wallClock: .init(relative: [.p90: 10]), + .cpuTotal: .init(relative: [.p90: 0]), + .allocatedResidentMemory: .init(relative: [.p90: 20]), + .contextSwitches: .init(relative: [.p90: 10]), + .throughput: .init(relative: [.p90: 10]), + .objectAllocCount: .init(relative: [.p90: 10]), + .retainCount: .init(relative: [.p90: 10]), + .releaseCount: .init(relative: [.p90: 10]), + .retainReleaseDelta: .init(relative: [.p90: 10]), + ] ) Benchmark.setup = { @@ -44,7 +56,7 @@ let benchmarks = { uniqueTestTopic = nil } - Benchmark("SwiftKafkaConsumer - basic consumer (messages: \(messageCount))") { benchmark in + Benchmark("SwiftKafkaConsumer_basic_consumer_messages_\(messageCount)") { benchmark in let uniqueGroupID = UUID().uuidString var consumerConfig = KafkaConsumerConfiguration( consumptionStrategy: .group( @@ -112,7 +124,7 @@ let benchmarks = { } } - Benchmark("SwiftKafkaConsumer - with offset commit (messages: \(messageCount))") { benchmark in + Benchmark("SwiftKafkaConsumer_with_offset_commit_messages_\(messageCount)") { benchmark in let uniqueGroupID = UUID().uuidString var consumerConfig = KafkaConsumerConfiguration( consumptionStrategy: .group( @@ -183,7 +195,7 @@ let benchmarks = { } } - Benchmark("librdkafka - basic consumer (messages: \(messageCount))") { benchmark in + Benchmark("librdkafka_basic_consumer_messages_\(messageCount)") { benchmark in let uniqueGroupID = UUID().uuidString let rdKafkaConsumerConfig: [String: String] = [ "group.id": uniqueGroupID, @@ -251,7 +263,7 @@ let benchmarks = { benchLog("All read up to ctr: \(ctr), avgRate: (\(Int(avgRateMb))KB/s), timePassed: \(Int(timeIntervalTotal))sec") } - Benchmark("librdkafka - with offset commit (messages: \(messageCount))") { benchmark in + Benchmark("librdkafka_with_offset_commit_messages_\(messageCount)") { benchmark in let uniqueGroupID = UUID().uuidString let rdKafkaConsumerConfig: [String: String] = [ "group.id": uniqueGroupID, diff --git a/Benchmarks/Plugins/BenchmarkCheckWrapper/BenchmarkCheckWrapper.swift b/Benchmarks/Plugins/BenchmarkCheckWrapper/BenchmarkCheckWrapper.swift new file mode 100644 index 00000000..9a76d5df --- /dev/null +++ b/Benchmarks/Plugins/BenchmarkCheckWrapper/BenchmarkCheckWrapper.swift @@ -0,0 +1,8 @@ +// +// File.swift +// +// +// Created by Andrey Potemkin on 26.2.2024. +// + +import Foundation diff --git a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json new file mode 100644 index 00000000..2a2daad0 --- /dev/null +++ b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json @@ -0,0 +1,10 @@ +{ + "allocatedResidentMemory" : 77660159, + "cpuTotal" : 80000000, + "objectAllocCount" : 4643, + "releaseCount" : 30815, + "retainCount" : 23791, + "retainReleaseDelta" : 2391, + "throughput" : 8, + "wallClock" : 133693439 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json new file mode 100644 index 00000000..0aae76e0 --- /dev/null +++ b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json @@ -0,0 +1,10 @@ +{ + "allocatedResidentMemory" : 70254592, + "cpuTotal" : 270000000, + "objectAllocCount" : 7917, + "releaseCount" : 38881, + "retainCount" : 27059, + "retainReleaseDelta" : 3905, + "throughput" : 2, + "wallClock" : 634566500 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json new file mode 100644 index 00000000..b46e3959 --- /dev/null +++ b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json @@ -0,0 +1,10 @@ +{ + "allocatedResidentMemory" : 42860544, + "cpuTotal" : 20000000, + "objectAllocCount" : 0, + "releaseCount" : 0, + "retainCount" : 0, + "retainReleaseDelta" : 0, + "throughput" : 2, + "wallClock" : 620173292 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json new file mode 100644 index 00000000..4813e13f --- /dev/null +++ b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json @@ -0,0 +1,10 @@ +{ + "allocatedResidentMemory" : 48300032, + "cpuTotal" : 150000000, + "objectAllocCount" : 0, + "releaseCount" : 0, + "retainCount" : 0, + "retainReleaseDelta" : 0, + "throughput" : 1, + "wallClock" : 827764833 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json new file mode 100644 index 00000000..30809eea --- /dev/null +++ b/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json @@ -0,0 +1,10 @@ +{ + "allocatedResidentMemory" : 69468160, + "cpuTotal" : 270000000, + "objectAllocCount" : 7175, + "releaseCount" : 37657, + "retainCount" : 26402, + "retainReleaseDelta" : 4080, + "throughput" : 2, + "wallClock" : 683494251 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json new file mode 100644 index 00000000..ea4aeb29 --- /dev/null +++ b/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json @@ -0,0 +1,10 @@ +{ + "allocatedResidentMemory" : 73007104, + "cpuTotal" : 270000000, + "objectAllocCount" : 7941, + "releaseCount" : 39038, + "retainCount" : 27174, + "retainReleaseDelta" : 3923, + "throughput" : 2, + "wallClock" : 644024709 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json new file mode 100644 index 00000000..e293e7f1 --- /dev/null +++ b/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json @@ -0,0 +1,10 @@ +{ + "allocatedResidentMemory" : 46596096, + "cpuTotal" : 20000000, + "objectAllocCount" : 0, + "releaseCount" : 0, + "retainCount" : 0, + "retainReleaseDelta" : 0, + "throughput" : 2, + "wallClock" : 627353501 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json new file mode 100644 index 00000000..7e4f31da --- /dev/null +++ b/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json @@ -0,0 +1,10 @@ +{ + "allocatedResidentMemory" : 44040192, + "cpuTotal" : 150000000, + "objectAllocCount" : 0, + "releaseCount" : 0, + "retainCount" : 0, + "retainReleaseDelta" : 0, + "throughput" : 1, + "wallClock" : 835152709 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json new file mode 100644 index 00000000..98c2cb96 --- /dev/null +++ b/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json @@ -0,0 +1,10 @@ +{ + "allocatedResidentMemory" : 77463551, + "cpuTotal" : 70057983, + "objectAllocCount" : 4647, + "releaseCount" : 25903, + "retainCount" : 18863, + "retainReleaseDelta" : 2399, + "throughput" : 8, + "wallClock" : 132972543 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json new file mode 100644 index 00000000..ed0bb51c --- /dev/null +++ b/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json @@ -0,0 +1,10 @@ +{ + "allocatedResidentMemory" : 72351744, + "cpuTotal" : 240000000, + "objectAllocCount" : 7980, + "releaseCount" : 34126, + "retainCount" : 22195, + "retainReleaseDelta" : 3951, + "throughput" : 2, + "wallClock" : 629008416 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json new file mode 100644 index 00000000..f025dbaa --- /dev/null +++ b/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json @@ -0,0 +1,10 @@ +{ + "allocatedResidentMemory" : 46530560, + "cpuTotal" : 10000000, + "objectAllocCount" : 0, + "releaseCount" : 0, + "retainCount" : 0, + "retainReleaseDelta" : 0, + "throughput" : 2, + "wallClock" : 621817625 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json new file mode 100644 index 00000000..7c1b356c --- /dev/null +++ b/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json @@ -0,0 +1,10 @@ +{ + "allocatedResidentMemory" : 48234496, + "cpuTotal" : 150000000, + "objectAllocCount" : 0, + "releaseCount" : 0, + "retainCount" : 0, + "retainReleaseDelta" : 0, + "throughput" : 1, + "wallClock" : 826038376 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json new file mode 100644 index 00000000..483a6912 --- /dev/null +++ b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json @@ -0,0 +1,10 @@ +{ + "allocatedResidentMemory" : 72876032, + "cpuTotal" : 270000000, + "objectAllocCount" : 6921, + "releaseCount" : 31952, + "retainCount" : 21121, + "retainReleaseDelta" : 3910, + "throughput" : 2, + "wallClock" : 639700000 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json new file mode 100644 index 00000000..7949338d --- /dev/null +++ b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json @@ -0,0 +1,10 @@ +{ + "allocatedResidentMemory" : 72089600, + "cpuTotal" : 260000000, + "objectAllocCount" : 7907, + "releaseCount" : 33901, + "retainCount" : 22094, + "retainReleaseDelta" : 3900, + "throughput" : 2, + "wallClock" : 631249876 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json new file mode 100644 index 00000000..2b5db7c0 --- /dev/null +++ b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json @@ -0,0 +1,10 @@ +{ + "allocatedResidentMemory" : 43319296, + "cpuTotal" : 20000000, + "objectAllocCount" : 0, + "releaseCount" : 0, + "retainCount" : 0, + "retainReleaseDelta" : 0, + "throughput" : 2, + "wallClock" : 620977209 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json new file mode 100644 index 00000000..81cf5434 --- /dev/null +++ b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json @@ -0,0 +1,10 @@ +{ + "allocatedResidentMemory" : 44761088, + "cpuTotal" : 150000000, + "objectAllocCount" : 0, + "releaseCount" : 0, + "retainCount" : 0, + "retainReleaseDelta" : 0, + "throughput" : 1, + "wallClock" : 819931167 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json new file mode 100644 index 00000000..7f3726ec --- /dev/null +++ b/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json @@ -0,0 +1,10 @@ +{ + "allocatedResidentMemory" : 75759616, + "cpuTotal" : 280000000, + "objectAllocCount" : 6919, + "releaseCount" : 36876, + "retainCount" : 26056, + "retainReleaseDelta" : 3909, + "throughput" : 2, + "wallClock" : 636520708 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json new file mode 100644 index 00000000..d68ac947 --- /dev/null +++ b/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json @@ -0,0 +1,10 @@ +{ + "allocatedResidentMemory" : 72744960, + "cpuTotal" : 290000000, + "objectAllocCount" : 7906, + "releaseCount" : 38839, + "retainCount" : 27022, + "retainReleaseDelta" : 3921, + "throughput" : 2, + "wallClock" : 634470084 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json new file mode 100644 index 00000000..0e90b68b --- /dev/null +++ b/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json @@ -0,0 +1,10 @@ +{ + "allocatedResidentMemory" : 50200576, + "cpuTotal" : 20000000, + "objectAllocCount" : 0, + "releaseCount" : 0, + "retainCount" : 0, + "retainReleaseDelta" : 0, + "throughput" : 2, + "wallClock" : 618693125 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json new file mode 100644 index 00000000..e5ba9512 --- /dev/null +++ b/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json @@ -0,0 +1,10 @@ +{ + "allocatedResidentMemory" : 47185920, + "cpuTotal" : 150000000, + "objectAllocCount" : 0, + "releaseCount" : 0, + "retainCount" : 0, + "retainReleaseDelta" : 0, + "throughput" : 1, + "wallClock" : 815092876 +} \ No newline at end of file From 34e7b4d1b91d62a91b15391476ab63358d83318c Mon Sep 17 00:00:00 2001 From: BlindSpot <127803250+blindspotbounty@users.noreply.github.com> Date: Tue, 27 Feb 2024 11:48:27 +0300 Subject: [PATCH 13/25] create dummy thresholds from ci results --- .../KafkaConsumerBenchmark.swift | 2 +- .../KafkaProducerBenchmark.swift | 14 +++++++++++++- .../BenchmarkCheckWrapper.swift | 8 -------- ...sumer_basic_consumer_messages_1000.p90.json | 16 ++++++++-------- ...r_with_offset_commit_messages_1000.p90.json | 16 ++++++++-------- ...kafka_basic_consumer_messages_1000.p90.json | 6 +++--- ...a_with_offset_commit_messages_1000.p90.json | 8 ++++---- ...sumer_basic_consumer_messages_1000.p90.json | 18 +++++++++--------- ...r_with_offset_commit_messages_1000.p90.json | 16 ++++++++-------- ...kafka_basic_consumer_messages_1000.p90.json | 6 +++--- ...a_with_offset_commit_messages_1000.p90.json | 8 ++++---- ...sumer_basic_consumer_messages_1000.p90.json | 4 ++-- ...r_with_offset_commit_messages_1000.p90.json | 4 ++-- ...kafka_basic_consumer_messages_1000.p90.json | 4 ++-- ...a_with_offset_commit_messages_1000.p90.json | 6 +++--- ...sumer_basic_consumer_messages_1000.p90.json | 18 +++++++++--------- ...r_with_offset_commit_messages_1000.p90.json | 16 ++++++++-------- ...kafka_basic_consumer_messages_1000.p90.json | 6 +++--- ...a_with_offset_commit_messages_1000.p90.json | 8 ++++---- ...sumer_basic_consumer_messages_1000.p90.json | 18 +++++++++--------- ...r_with_offset_commit_messages_1000.p90.json | 16 ++++++++-------- ...kafka_basic_consumer_messages_1000.p90.json | 6 +++--- ...a_with_offset_commit_messages_1000.p90.json | 8 ++++---- 23 files changed, 118 insertions(+), 114 deletions(-) delete mode 100644 Benchmarks/Plugins/BenchmarkCheckWrapper/BenchmarkCheckWrapper.swift diff --git a/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift b/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift index 66da8ce6..5177d8df 100644 --- a/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift +++ b/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift @@ -34,7 +34,7 @@ let benchmarks = { thresholds: [ // Thresholds are wild guess mostly. Have to adjust with time. .wallClock: .init(relative: [.p90: 10]), - .cpuTotal: .init(relative: [.p90: 0]), + .cpuTotal: .init(relative: [.p90: 10]), .allocatedResidentMemory: .init(relative: [.p90: 20]), .contextSwitches: .init(relative: [.p90: 10]), .throughput: .init(relative: [.p90: 10]), diff --git a/Benchmarks/Benchmarks/SwiftKafkaProducerBenchmarks/KafkaProducerBenchmark.swift b/Benchmarks/Benchmarks/SwiftKafkaProducerBenchmarks/KafkaProducerBenchmark.swift index 87f0a50b..1971d9e0 100644 --- a/Benchmarks/Benchmarks/SwiftKafkaProducerBenchmarks/KafkaProducerBenchmark.swift +++ b/Benchmarks/Benchmarks/SwiftKafkaProducerBenchmarks/KafkaProducerBenchmark.swift @@ -22,7 +22,19 @@ let benchmarks = { warmupIterations: 0, scalingFactor: .one, maxDuration: .seconds(5), - maxIterations: 100 + maxIterations: 100, + thresholds: [ + // Thresholds are wild guess mostly. Have to adjust with time. + .wallClock: .init(relative: [.p90: 10]), + .cpuTotal: .init(relative: [.p90: 10]), + .allocatedResidentMemory: .init(relative: [.p90: 20]), + .contextSwitches: .init(relative: [.p90: 10]), + .throughput: .init(relative: [.p90: 10]), + .objectAllocCount: .init(relative: [.p90: 10]), + .retainCount: .init(relative: [.p90: 10]), + .releaseCount: .init(relative: [.p90: 10]), + .retainReleaseDelta: .init(relative: [.p90: 10]), + ] ) Benchmark.setup = {} diff --git a/Benchmarks/Plugins/BenchmarkCheckWrapper/BenchmarkCheckWrapper.swift b/Benchmarks/Plugins/BenchmarkCheckWrapper/BenchmarkCheckWrapper.swift deleted file mode 100644 index 9a76d5df..00000000 --- a/Benchmarks/Plugins/BenchmarkCheckWrapper/BenchmarkCheckWrapper.swift +++ /dev/null @@ -1,8 +0,0 @@ -// -// File.swift -// -// -// Created by Andrey Potemkin on 26.2.2024. -// - -import Foundation diff --git a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json index 2a2daad0..aee9e223 100644 --- a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json +++ b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json @@ -1,10 +1,10 @@ { - "allocatedResidentMemory" : 77660159, - "cpuTotal" : 80000000, - "objectAllocCount" : 4643, - "releaseCount" : 30815, - "retainCount" : 23791, - "retainReleaseDelta" : 2391, + "allocatedResidentMemory" : 77463551, + "cpuTotal" : 70057983, + "objectAllocCount" : 4647, + "releaseCount" : 25903, + "retainCount" : 18863, + "retainReleaseDelta" : 2399, "throughput" : 8, - "wallClock" : 133693439 -} \ No newline at end of file + "wallClock" : 9994972543 +} diff --git a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json index 0aae76e0..0ecf5a24 100644 --- a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json +++ b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json @@ -1,10 +1,10 @@ { - "allocatedResidentMemory" : 70254592, - "cpuTotal" : 270000000, - "objectAllocCount" : 7917, - "releaseCount" : 38881, - "retainCount" : 27059, - "retainReleaseDelta" : 3905, + "allocatedResidentMemory" : 72351744, + "cpuTotal" : 240000000, + "objectAllocCount" : 7980, + "releaseCount" : 34126, + "retainCount" : 22195, + "retainReleaseDelta" : 3951, "throughput" : 2, - "wallClock" : 634566500 -} \ No newline at end of file + "wallClock" : 9995008416 +} diff --git a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json index b46e3959..03e257ef 100644 --- a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json +++ b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json @@ -1,10 +1,10 @@ { - "allocatedResidentMemory" : 42860544, + "allocatedResidentMemory" : 46530560, "cpuTotal" : 20000000, "objectAllocCount" : 0, "releaseCount" : 0, "retainCount" : 0, "retainReleaseDelta" : 0, "throughput" : 2, - "wallClock" : 620173292 -} \ No newline at end of file + "wallClock" : 621817625 +} diff --git a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json index 4813e13f..6d2137ba 100644 --- a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json +++ b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json @@ -1,10 +1,10 @@ { - "allocatedResidentMemory" : 48300032, - "cpuTotal" : 150000000, + "allocatedResidentMemory" : 48234496, + "cpuTotal" : 400000000, "objectAllocCount" : 0, "releaseCount" : 0, "retainCount" : 0, "retainReleaseDelta" : 0, "throughput" : 1, - "wallClock" : 827764833 -} \ No newline at end of file + "wallClock" : 1506038376 +} diff --git a/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json index 30809eea..aee9e223 100644 --- a/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json +++ b/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json @@ -1,10 +1,10 @@ { - "allocatedResidentMemory" : 69468160, - "cpuTotal" : 270000000, - "objectAllocCount" : 7175, - "releaseCount" : 37657, - "retainCount" : 26402, - "retainReleaseDelta" : 4080, - "throughput" : 2, - "wallClock" : 683494251 -} \ No newline at end of file + "allocatedResidentMemory" : 77463551, + "cpuTotal" : 70057983, + "objectAllocCount" : 4647, + "releaseCount" : 25903, + "retainCount" : 18863, + "retainReleaseDelta" : 2399, + "throughput" : 8, + "wallClock" : 9994972543 +} diff --git a/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json index ea4aeb29..0ecf5a24 100644 --- a/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json +++ b/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json @@ -1,10 +1,10 @@ { - "allocatedResidentMemory" : 73007104, - "cpuTotal" : 270000000, - "objectAllocCount" : 7941, - "releaseCount" : 39038, - "retainCount" : 27174, - "retainReleaseDelta" : 3923, + "allocatedResidentMemory" : 72351744, + "cpuTotal" : 240000000, + "objectAllocCount" : 7980, + "releaseCount" : 34126, + "retainCount" : 22195, + "retainReleaseDelta" : 3951, "throughput" : 2, - "wallClock" : 644024709 -} \ No newline at end of file + "wallClock" : 9995008416 +} diff --git a/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json index e293e7f1..03e257ef 100644 --- a/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json +++ b/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json @@ -1,10 +1,10 @@ { - "allocatedResidentMemory" : 46596096, + "allocatedResidentMemory" : 46530560, "cpuTotal" : 20000000, "objectAllocCount" : 0, "releaseCount" : 0, "retainCount" : 0, "retainReleaseDelta" : 0, "throughput" : 2, - "wallClock" : 627353501 -} \ No newline at end of file + "wallClock" : 621817625 +} diff --git a/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json index 7e4f31da..6d2137ba 100644 --- a/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json +++ b/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json @@ -1,10 +1,10 @@ { - "allocatedResidentMemory" : 44040192, - "cpuTotal" : 150000000, + "allocatedResidentMemory" : 48234496, + "cpuTotal" : 400000000, "objectAllocCount" : 0, "releaseCount" : 0, "retainCount" : 0, "retainReleaseDelta" : 0, "throughput" : 1, - "wallClock" : 835152709 -} \ No newline at end of file + "wallClock" : 1506038376 +} diff --git a/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json index 98c2cb96..aee9e223 100644 --- a/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json +++ b/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json @@ -6,5 +6,5 @@ "retainCount" : 18863, "retainReleaseDelta" : 2399, "throughput" : 8, - "wallClock" : 132972543 -} \ No newline at end of file + "wallClock" : 9994972543 +} diff --git a/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json index ed0bb51c..0ecf5a24 100644 --- a/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json +++ b/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json @@ -6,5 +6,5 @@ "retainCount" : 22195, "retainReleaseDelta" : 3951, "throughput" : 2, - "wallClock" : 629008416 -} \ No newline at end of file + "wallClock" : 9995008416 +} diff --git a/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json index f025dbaa..03e257ef 100644 --- a/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json +++ b/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json @@ -1,10 +1,10 @@ { "allocatedResidentMemory" : 46530560, - "cpuTotal" : 10000000, + "cpuTotal" : 20000000, "objectAllocCount" : 0, "releaseCount" : 0, "retainCount" : 0, "retainReleaseDelta" : 0, "throughput" : 2, "wallClock" : 621817625 -} \ No newline at end of file +} diff --git a/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json index 7c1b356c..6d2137ba 100644 --- a/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json +++ b/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json @@ -1,10 +1,10 @@ { "allocatedResidentMemory" : 48234496, - "cpuTotal" : 150000000, + "cpuTotal" : 400000000, "objectAllocCount" : 0, "releaseCount" : 0, "retainCount" : 0, "retainReleaseDelta" : 0, "throughput" : 1, - "wallClock" : 826038376 -} \ No newline at end of file + "wallClock" : 1506038376 +} diff --git a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json index 483a6912..aee9e223 100644 --- a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json +++ b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json @@ -1,10 +1,10 @@ { - "allocatedResidentMemory" : 72876032, - "cpuTotal" : 270000000, - "objectAllocCount" : 6921, - "releaseCount" : 31952, - "retainCount" : 21121, - "retainReleaseDelta" : 3910, - "throughput" : 2, - "wallClock" : 639700000 -} \ No newline at end of file + "allocatedResidentMemory" : 77463551, + "cpuTotal" : 70057983, + "objectAllocCount" : 4647, + "releaseCount" : 25903, + "retainCount" : 18863, + "retainReleaseDelta" : 2399, + "throughput" : 8, + "wallClock" : 9994972543 +} diff --git a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json index 7949338d..0ecf5a24 100644 --- a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json +++ b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json @@ -1,10 +1,10 @@ { - "allocatedResidentMemory" : 72089600, - "cpuTotal" : 260000000, - "objectAllocCount" : 7907, - "releaseCount" : 33901, - "retainCount" : 22094, - "retainReleaseDelta" : 3900, + "allocatedResidentMemory" : 72351744, + "cpuTotal" : 240000000, + "objectAllocCount" : 7980, + "releaseCount" : 34126, + "retainCount" : 22195, + "retainReleaseDelta" : 3951, "throughput" : 2, - "wallClock" : 631249876 -} \ No newline at end of file + "wallClock" : 9995008416 +} diff --git a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json index 2b5db7c0..03e257ef 100644 --- a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json +++ b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json @@ -1,10 +1,10 @@ { - "allocatedResidentMemory" : 43319296, + "allocatedResidentMemory" : 46530560, "cpuTotal" : 20000000, "objectAllocCount" : 0, "releaseCount" : 0, "retainCount" : 0, "retainReleaseDelta" : 0, "throughput" : 2, - "wallClock" : 620977209 -} \ No newline at end of file + "wallClock" : 621817625 +} diff --git a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json index 81cf5434..6d2137ba 100644 --- a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json +++ b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json @@ -1,10 +1,10 @@ { - "allocatedResidentMemory" : 44761088, - "cpuTotal" : 150000000, + "allocatedResidentMemory" : 48234496, + "cpuTotal" : 400000000, "objectAllocCount" : 0, "releaseCount" : 0, "retainCount" : 0, "retainReleaseDelta" : 0, "throughput" : 1, - "wallClock" : 819931167 -} \ No newline at end of file + "wallClock" : 1506038376 +} diff --git a/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json index 7f3726ec..aee9e223 100644 --- a/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json +++ b/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json @@ -1,10 +1,10 @@ { - "allocatedResidentMemory" : 75759616, - "cpuTotal" : 280000000, - "objectAllocCount" : 6919, - "releaseCount" : 36876, - "retainCount" : 26056, - "retainReleaseDelta" : 3909, - "throughput" : 2, - "wallClock" : 636520708 -} \ No newline at end of file + "allocatedResidentMemory" : 77463551, + "cpuTotal" : 70057983, + "objectAllocCount" : 4647, + "releaseCount" : 25903, + "retainCount" : 18863, + "retainReleaseDelta" : 2399, + "throughput" : 8, + "wallClock" : 9994972543 +} diff --git a/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json index d68ac947..0ecf5a24 100644 --- a/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json +++ b/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json @@ -1,10 +1,10 @@ { - "allocatedResidentMemory" : 72744960, - "cpuTotal" : 290000000, - "objectAllocCount" : 7906, - "releaseCount" : 38839, - "retainCount" : 27022, - "retainReleaseDelta" : 3921, + "allocatedResidentMemory" : 72351744, + "cpuTotal" : 240000000, + "objectAllocCount" : 7980, + "releaseCount" : 34126, + "retainCount" : 22195, + "retainReleaseDelta" : 3951, "throughput" : 2, - "wallClock" : 634470084 -} \ No newline at end of file + "wallClock" : 9995008416 +} diff --git a/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json index 0e90b68b..03e257ef 100644 --- a/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json +++ b/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json @@ -1,10 +1,10 @@ { - "allocatedResidentMemory" : 50200576, + "allocatedResidentMemory" : 46530560, "cpuTotal" : 20000000, "objectAllocCount" : 0, "releaseCount" : 0, "retainCount" : 0, "retainReleaseDelta" : 0, "throughput" : 2, - "wallClock" : 618693125 -} \ No newline at end of file + "wallClock" : 621817625 +} diff --git a/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json index e5ba9512..6d2137ba 100644 --- a/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json +++ b/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json @@ -1,10 +1,10 @@ { - "allocatedResidentMemory" : 47185920, - "cpuTotal" : 150000000, + "allocatedResidentMemory" : 48234496, + "cpuTotal" : 400000000, "objectAllocCount" : 0, "releaseCount" : 0, "retainCount" : 0, "retainReleaseDelta" : 0, "throughput" : 1, - "wallClock" : 815092876 -} \ No newline at end of file + "wallClock" : 1506038376 +} From 7f4c8e250ea9ec2e7665a3101dc3b6234ddb074c Mon Sep 17 00:00:00 2001 From: BlindSpot <127803250+blindspotbounty@users.noreply.github.com> Date: Tue, 27 Feb 2024 12:06:14 +0300 Subject: [PATCH 14/25] disable benchmark in CI --- ...umer_basic_consumer_messages_1000.p90.json | 10 ------- ..._with_offset_commit_messages_1000.p90.json | 10 ------- ...afka_basic_consumer_messages_1000.p90.json | 10 ------- ..._with_offset_commit_messages_1000.p90.json | 10 ------- ...umer_basic_consumer_messages_1000.p90.json | 10 ------- ..._with_offset_commit_messages_1000.p90.json | 10 ------- ...afka_basic_consumer_messages_1000.p90.json | 10 ------- ..._with_offset_commit_messages_1000.p90.json | 10 ------- ...umer_basic_consumer_messages_1000.p90.json | 10 ------- ..._with_offset_commit_messages_1000.p90.json | 10 ------- ...afka_basic_consumer_messages_1000.p90.json | 10 ------- ..._with_offset_commit_messages_1000.p90.json | 10 ------- ...umer_basic_consumer_messages_1000.p90.json | 10 ------- ..._with_offset_commit_messages_1000.p90.json | 10 ------- ...afka_basic_consumer_messages_1000.p90.json | 10 ------- ..._with_offset_commit_messages_1000.p90.json | 10 ------- ...umer_basic_consumer_messages_1000.p90.json | 10 ------- ..._with_offset_commit_messages_1000.p90.json | 10 ------- ...afka_basic_consumer_messages_1000.p90.json | 10 ------- ..._with_offset_commit_messages_1000.p90.json | 10 ------- dev/test-benchmark-thresholds.sh | 27 +++++++++++++++++++ docker/docker-compose.yaml | 3 +-- 22 files changed, 28 insertions(+), 202 deletions(-) delete mode 100644 Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json delete mode 100644 Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json delete mode 100644 Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json delete mode 100644 Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json delete mode 100644 Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json delete mode 100644 Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json delete mode 100644 Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json delete mode 100644 Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json delete mode 100644 Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json delete mode 100644 Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json delete mode 100644 Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json delete mode 100644 Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json delete mode 100644 Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json delete mode 100644 Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json delete mode 100644 Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json delete mode 100644 Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json delete mode 100644 Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json delete mode 100644 Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json delete mode 100644 Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json delete mode 100644 Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json create mode 100644 dev/test-benchmark-thresholds.sh diff --git a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json deleted file mode 100644 index aee9e223..00000000 --- a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "allocatedResidentMemory" : 77463551, - "cpuTotal" : 70057983, - "objectAllocCount" : 4647, - "releaseCount" : 25903, - "retainCount" : 18863, - "retainReleaseDelta" : 2399, - "throughput" : 8, - "wallClock" : 9994972543 -} diff --git a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json deleted file mode 100644 index 0ecf5a24..00000000 --- a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "allocatedResidentMemory" : 72351744, - "cpuTotal" : 240000000, - "objectAllocCount" : 7980, - "releaseCount" : 34126, - "retainCount" : 22195, - "retainReleaseDelta" : 3951, - "throughput" : 2, - "wallClock" : 9995008416 -} diff --git a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json deleted file mode 100644 index 03e257ef..00000000 --- a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "allocatedResidentMemory" : 46530560, - "cpuTotal" : 20000000, - "objectAllocCount" : 0, - "releaseCount" : 0, - "retainCount" : 0, - "retainReleaseDelta" : 0, - "throughput" : 2, - "wallClock" : 621817625 -} diff --git a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json deleted file mode 100644 index 6d2137ba..00000000 --- a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "allocatedResidentMemory" : 48234496, - "cpuTotal" : 400000000, - "objectAllocCount" : 0, - "releaseCount" : 0, - "retainCount" : 0, - "retainReleaseDelta" : 0, - "throughput" : 1, - "wallClock" : 1506038376 -} diff --git a/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json deleted file mode 100644 index aee9e223..00000000 --- a/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "allocatedResidentMemory" : 77463551, - "cpuTotal" : 70057983, - "objectAllocCount" : 4647, - "releaseCount" : 25903, - "retainCount" : 18863, - "retainReleaseDelta" : 2399, - "throughput" : 8, - "wallClock" : 9994972543 -} diff --git a/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json deleted file mode 100644 index 0ecf5a24..00000000 --- a/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "allocatedResidentMemory" : 72351744, - "cpuTotal" : 240000000, - "objectAllocCount" : 7980, - "releaseCount" : 34126, - "retainCount" : 22195, - "retainReleaseDelta" : 3951, - "throughput" : 2, - "wallClock" : 9995008416 -} diff --git a/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json deleted file mode 100644 index 03e257ef..00000000 --- a/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "allocatedResidentMemory" : 46530560, - "cpuTotal" : 20000000, - "objectAllocCount" : 0, - "releaseCount" : 0, - "retainCount" : 0, - "retainReleaseDelta" : 0, - "throughput" : 2, - "wallClock" : 621817625 -} diff --git a/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json deleted file mode 100644 index 6d2137ba..00000000 --- a/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "allocatedResidentMemory" : 48234496, - "cpuTotal" : 400000000, - "objectAllocCount" : 0, - "releaseCount" : 0, - "retainCount" : 0, - "retainReleaseDelta" : 0, - "throughput" : 1, - "wallClock" : 1506038376 -} diff --git a/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json deleted file mode 100644 index aee9e223..00000000 --- a/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "allocatedResidentMemory" : 77463551, - "cpuTotal" : 70057983, - "objectAllocCount" : 4647, - "releaseCount" : 25903, - "retainCount" : 18863, - "retainReleaseDelta" : 2399, - "throughput" : 8, - "wallClock" : 9994972543 -} diff --git a/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json deleted file mode 100644 index 0ecf5a24..00000000 --- a/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "allocatedResidentMemory" : 72351744, - "cpuTotal" : 240000000, - "objectAllocCount" : 7980, - "releaseCount" : 34126, - "retainCount" : 22195, - "retainReleaseDelta" : 3951, - "throughput" : 2, - "wallClock" : 9995008416 -} diff --git a/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json deleted file mode 100644 index 03e257ef..00000000 --- a/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "allocatedResidentMemory" : 46530560, - "cpuTotal" : 20000000, - "objectAllocCount" : 0, - "releaseCount" : 0, - "retainCount" : 0, - "retainReleaseDelta" : 0, - "throughput" : 2, - "wallClock" : 621817625 -} diff --git a/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json deleted file mode 100644 index 6d2137ba..00000000 --- a/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "allocatedResidentMemory" : 48234496, - "cpuTotal" : 400000000, - "objectAllocCount" : 0, - "releaseCount" : 0, - "retainCount" : 0, - "retainReleaseDelta" : 0, - "throughput" : 1, - "wallClock" : 1506038376 -} diff --git a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json deleted file mode 100644 index aee9e223..00000000 --- a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "allocatedResidentMemory" : 77463551, - "cpuTotal" : 70057983, - "objectAllocCount" : 4647, - "releaseCount" : 25903, - "retainCount" : 18863, - "retainReleaseDelta" : 2399, - "throughput" : 8, - "wallClock" : 9994972543 -} diff --git a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json deleted file mode 100644 index 0ecf5a24..00000000 --- a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "allocatedResidentMemory" : 72351744, - "cpuTotal" : 240000000, - "objectAllocCount" : 7980, - "releaseCount" : 34126, - "retainCount" : 22195, - "retainReleaseDelta" : 3951, - "throughput" : 2, - "wallClock" : 9995008416 -} diff --git a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json deleted file mode 100644 index 03e257ef..00000000 --- a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "allocatedResidentMemory" : 46530560, - "cpuTotal" : 20000000, - "objectAllocCount" : 0, - "releaseCount" : 0, - "retainCount" : 0, - "retainReleaseDelta" : 0, - "throughput" : 2, - "wallClock" : 621817625 -} diff --git a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json deleted file mode 100644 index 6d2137ba..00000000 --- a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "allocatedResidentMemory" : 48234496, - "cpuTotal" : 400000000, - "objectAllocCount" : 0, - "releaseCount" : 0, - "retainCount" : 0, - "retainReleaseDelta" : 0, - "throughput" : 1, - "wallClock" : 1506038376 -} diff --git a/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json deleted file mode 100644 index aee9e223..00000000 --- a/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "allocatedResidentMemory" : 77463551, - "cpuTotal" : 70057983, - "objectAllocCount" : 4647, - "releaseCount" : 25903, - "retainCount" : 18863, - "retainReleaseDelta" : 2399, - "throughput" : 8, - "wallClock" : 9994972543 -} diff --git a/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json deleted file mode 100644 index 0ecf5a24..00000000 --- a/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "allocatedResidentMemory" : 72351744, - "cpuTotal" : 240000000, - "objectAllocCount" : 7980, - "releaseCount" : 34126, - "retainCount" : 22195, - "retainReleaseDelta" : 3951, - "throughput" : 2, - "wallClock" : 9995008416 -} diff --git a/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json deleted file mode 100644 index 03e257ef..00000000 --- a/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "allocatedResidentMemory" : 46530560, - "cpuTotal" : 20000000, - "objectAllocCount" : 0, - "releaseCount" : 0, - "retainCount" : 0, - "retainReleaseDelta" : 0, - "throughput" : 2, - "wallClock" : 621817625 -} diff --git a/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json deleted file mode 100644 index 6d2137ba..00000000 --- a/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "allocatedResidentMemory" : 48234496, - "cpuTotal" : 400000000, - "objectAllocCount" : 0, - "releaseCount" : 0, - "retainCount" : 0, - "retainReleaseDelta" : 0, - "throughput" : 1, - "wallClock" : 1506038376 -} diff --git a/dev/test-benchmark-thresholds.sh b/dev/test-benchmark-thresholds.sh new file mode 100644 index 00000000..f93ea583 --- /dev/null +++ b/dev/test-benchmark-thresholds.sh @@ -0,0 +1,27 @@ +cd Benchmarks +swift package --disable-sandbox benchmark baseline update PR --no-progress +git checkout main +swift package --disable-sandbox benchmark baseline update main --no-progress + +swift package benchmark baseline check main PR +BENCHMARK_RESULT=$? + +echo "Retcode is $BENCHMARK_RESULT" + +if [ $BENCHMARK_RESULT -eq 0 ]; then + echo "Benchmark results are the same as for main" +fi + +if [ $BENCHMARK_RESULT -eq 4 ]; then + echo "Benchmark results are better as for main" +fi + +if [ $BENCHMARK_RESULT -eq 1 ]; then + echo "Benchmark failed" + exit 1 +fi + +if [ $BENCHMARK_RESULT -eq 2 ]; then + echo "Benchmark results are worse than main" + exit 1 +fi diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml index 888922c8..2dce8dff 100644 --- a/docker/docker-compose.yaml +++ b/docker/docker-compose.yaml @@ -55,8 +55,7 @@ services: command: > /bin/bash -xcl " swift build --build-tests $${SANITIZER_ARG-} && \ - swift $${SWIFT_TEST_VERB-test} $${WARN_AS_ERROR_ARG-} $${SANITIZER_ARG-} $${IMPORT_CHECK_ARG-} $${STRICT_CONCURRENCY_ARG-} && \ - cd Benchmarks && swift package --disable-sandbox benchmark baseline check --check-absolute-path Thresholds/$${SWIFT_VERSION-}/ + swift $${SWIFT_TEST_VERB-test} $${WARN_AS_ERROR_ARG-} $${SANITIZER_ARG-} $${IMPORT_CHECK_ARG-} $${STRICT_CONCURRENCY_ARG-} " benchmark: From 01779edae1b2d58ac2574a4caebcae6823cc15d7 Mon Sep 17 00:00:00 2001 From: BlindSpot <127803250+blindspotbounty@users.noreply.github.com> Date: Tue, 27 Feb 2024 12:16:26 +0300 Subject: [PATCH 15/25] add header --- dev/test-benchmark-thresholds.sh | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/dev/test-benchmark-thresholds.sh b/dev/test-benchmark-thresholds.sh index f93ea583..731c3e97 100644 --- a/dev/test-benchmark-thresholds.sh +++ b/dev/test-benchmark-thresholds.sh @@ -1,3 +1,18 @@ +#!/bin/bash +##===----------------------------------------------------------------------===## +## +## This source file is part of the swift-kafka-client open source project +## +## Copyright (c) YEARS Apple Inc. and the swift-kafka-client project authors +## Licensed under Apache License v2.0 +## +## See LICENSE.txt for license information +## See CONTRIBUTORS.txt for the list of swift-kafka-client project authors +## +## SPDX-License-Identifier: Apache-2.0 +## +##===----------------------------------------------------------------------===## + cd Benchmarks swift package --disable-sandbox benchmark baseline update PR --no-progress git checkout main From df17518b5c33e634f5cdf9d2093b5604d2bc6947 Mon Sep 17 00:00:00 2001 From: BlindSpot <127803250+blindspotbounty@users.noreply.github.com> Date: Mon, 4 Mar 2024 12:46:06 +0200 Subject: [PATCH 16/25] add stable metrics --- .../KafkaConsumerBenchmark.swift | 4 +++- .../SwiftKafkaConsumerBenchmarks/Utilities.swift | 13 +++++++++++++ ...kaConsumer_basic_consumer_messages_1000.p90.json | 10 ++++++++++ ...nsumer_with_offset_commit_messages_1000.p90.json | 10 ++++++++++ ...librdkafka_basic_consumer_messages_1000.p90.json | 10 ++++++++++ ...dkafka_with_offset_commit_messages_1000.p90.json | 10 ++++++++++ ...kaConsumer_basic_consumer_messages_1000.p90.json | 10 ++++++++++ ...nsumer_with_offset_commit_messages_1000.p90.json | 10 ++++++++++ ...librdkafka_basic_consumer_messages_1000.p90.json | 10 ++++++++++ ...dkafka_with_offset_commit_messages_1000.p90.json | 10 ++++++++++ ...kaConsumer_basic_consumer_messages_1000.p90.json | 10 ++++++++++ ...nsumer_with_offset_commit_messages_1000.p90.json | 10 ++++++++++ ...librdkafka_basic_consumer_messages_1000.p90.json | 10 ++++++++++ ...dkafka_with_offset_commit_messages_1000.p90.json | 10 ++++++++++ ...kaConsumer_basic_consumer_messages_1000.p90.json | 10 ++++++++++ ...nsumer_with_offset_commit_messages_1000.p90.json | 10 ++++++++++ ...librdkafka_basic_consumer_messages_1000.p90.json | 10 ++++++++++ ...dkafka_with_offset_commit_messages_1000.p90.json | 10 ++++++++++ ...kaConsumer_basic_consumer_messages_1000.p90.json | 10 ++++++++++ ...nsumer_with_offset_commit_messages_1000.p90.json | 10 ++++++++++ ...librdkafka_basic_consumer_messages_1000.p90.json | 10 ++++++++++ ...dkafka_with_offset_commit_messages_1000.p90.json | 10 ++++++++++ 22 files changed, 216 insertions(+), 1 deletion(-) create mode 100644 Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json diff --git a/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift b/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift index 5177d8df..da0ff9a2 100644 --- a/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift +++ b/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift @@ -21,12 +21,14 @@ import Kafka import Logging import ServiceLifecycle + + let benchmarks = { var uniqueTestTopic: String! let messageCount: UInt = 1000 Benchmark.defaultConfiguration = .init( - metrics: [.wallClock, .cpuTotal, .allocatedResidentMemory, .contextSwitches, .throughput] + .arc, + metrics: metricsToMeasure, warmupIterations: 0, scalingFactor: .one, maxDuration: .seconds(5), diff --git a/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/Utilities.swift b/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/Utilities.swift index 304dc1fb..6518fd9d 100644 --- a/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/Utilities.swift +++ b/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/Utilities.swift @@ -126,3 +126,16 @@ extension Benchmark { return try await body() } } + +fileprivate let stableBenchmarkMetrics: [BenchmarkMetric] = [ + .allocatedResidentMemory, +] + .arc + +fileprivate let allMetricsToMeasure: [BenchmarkMetric] = [ + .wallClock, + .cpuTotal, + .contextSwitches, + .throughput +] + stableBenchmarkMetrics + +let metricsToMeasure: [BenchmarkMetric] = (Bool(ProcessInfo.processInfo.environment["KAFKA_USE_STABLE_BENCHMARK_METRICS"] ?? "false") == true) ? stableBenchmarkMetrics : allMetricsToMeasure diff --git a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json new file mode 100644 index 00000000..0503fc0c --- /dev/null +++ b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json @@ -0,0 +1,10 @@ +{ + "allocatedResidentMemory" : 70516736, + "cpuTotal" : 290000000, + "objectAllocCount" : 6939, + "releaseCount" : 36944, + "retainCount" : 26088, + "retainReleaseDelta" : 3921, + "throughput" : 2, + "wallClock" : 636803626 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json new file mode 100644 index 00000000..7bde2ec9 --- /dev/null +++ b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json @@ -0,0 +1,10 @@ +{ + "allocatedResidentMemory" : 71761920, + "cpuTotal" : 220000000, + "objectAllocCount" : 7986, + "releaseCount" : 39057, + "retainCount" : 27123, + "retainReleaseDelta" : 3948, + "throughput" : 2, + "wallClock" : 628638500 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json new file mode 100644 index 00000000..1358e408 --- /dev/null +++ b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json @@ -0,0 +1,10 @@ +{ + "allocatedResidentMemory" : 43384832, + "cpuTotal" : 20000000, + "objectAllocCount" : 0, + "releaseCount" : 0, + "retainCount" : 0, + "retainReleaseDelta" : 0, + "throughput" : 2, + "wallClock" : 621262042 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json new file mode 100644 index 00000000..8584e8ee --- /dev/null +++ b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json @@ -0,0 +1,10 @@ +{ + "allocatedResidentMemory" : 47841280, + "cpuTotal" : 140000000, + "objectAllocCount" : 0, + "releaseCount" : 0, + "retainCount" : 0, + "retainReleaseDelta" : 0, + "throughput" : 1, + "wallClock" : 809207584 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json new file mode 100644 index 00000000..b2e753ba --- /dev/null +++ b/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json @@ -0,0 +1,10 @@ +{ + "allocatedResidentMemory" : 68878336, + "cpuTotal" : 250000000, + "objectAllocCount" : 7223, + "releaseCount" : 37789, + "retainCount" : 26456, + "retainReleaseDelta" : 4110, + "throughput" : 2, + "wallClock" : 678995792 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json new file mode 100644 index 00000000..7a81cb42 --- /dev/null +++ b/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json @@ -0,0 +1,10 @@ +{ + "allocatedResidentMemory" : 69402624, + "cpuTotal" : 210000000, + "objectAllocCount" : 8043, + "releaseCount" : 39309, + "retainCount" : 27276, + "retainReleaseDelta" : 3990, + "throughput" : 2, + "wallClock" : 643641375 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json new file mode 100644 index 00000000..76abcc09 --- /dev/null +++ b/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json @@ -0,0 +1,10 @@ +{ + "allocatedResidentMemory" : 50069504, + "cpuTotal" : 20000000, + "objectAllocCount" : 0, + "releaseCount" : 0, + "retainCount" : 0, + "retainReleaseDelta" : 0, + "throughput" : 2, + "wallClock" : 625373958 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json new file mode 100644 index 00000000..97a7c309 --- /dev/null +++ b/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json @@ -0,0 +1,10 @@ +{ + "allocatedResidentMemory" : 51773440, + "cpuTotal" : 140000000, + "objectAllocCount" : 0, + "releaseCount" : 0, + "retainCount" : 0, + "retainReleaseDelta" : 0, + "throughput" : 3, + "wallClock" : 312475647 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json new file mode 100644 index 00000000..2875624b --- /dev/null +++ b/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json @@ -0,0 +1,10 @@ +{ + "allocatedResidentMemory" : 70123520, + "cpuTotal" : 230000000, + "objectAllocCount" : 7026, + "releaseCount" : 32263, + "retainCount" : 21255, + "retainReleaseDelta" : 3982, + "throughput" : 2, + "wallClock" : 635520125 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json new file mode 100644 index 00000000..a17fd1db --- /dev/null +++ b/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json @@ -0,0 +1,10 @@ +{ + "allocatedResidentMemory" : 76808191, + "cpuTotal" : 40009727, + "objectAllocCount" : 5611, + "releaseCount" : 27807, + "retainCount" : 19839, + "retainReleaseDelta" : 2371, + "throughput" : 9, + "wallClock" : 117243903 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json new file mode 100644 index 00000000..776ec643 --- /dev/null +++ b/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json @@ -0,0 +1,10 @@ +{ + "allocatedResidentMemory" : 45219840, + "cpuTotal" : 20000000, + "objectAllocCount" : 0, + "releaseCount" : 0, + "retainCount" : 0, + "retainReleaseDelta" : 0, + "throughput" : 2, + "wallClock" : 618535750 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json new file mode 100644 index 00000000..cbe4bbe3 --- /dev/null +++ b/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json @@ -0,0 +1,10 @@ +{ + "allocatedResidentMemory" : 49217536, + "cpuTotal" : 160000000, + "objectAllocCount" : 0, + "releaseCount" : 0, + "retainCount" : 0, + "retainReleaseDelta" : 0, + "throughput" : 1, + "wallClock" : 833637292 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json new file mode 100644 index 00000000..cdd2e71a --- /dev/null +++ b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json @@ -0,0 +1,10 @@ +{ + "allocatedResidentMemory" : 74121216, + "cpuTotal" : 260000000, + "objectAllocCount" : 6983, + "releaseCount" : 32103, + "retainCount" : 21175, + "retainReleaseDelta" : 3951, + "throughput" : 2, + "wallClock" : 632957792 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json new file mode 100644 index 00000000..d4d959d3 --- /dev/null +++ b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json @@ -0,0 +1,10 @@ +{ + "allocatedResidentMemory" : 75300864, + "cpuTotal" : 220000000, + "objectAllocCount" : 7962, + "releaseCount" : 34047, + "retainCount" : 22148, + "retainReleaseDelta" : 3937, + "throughput" : 2, + "wallClock" : 626389251 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json new file mode 100644 index 00000000..4a7f61fc --- /dev/null +++ b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json @@ -0,0 +1,10 @@ +{ + "allocatedResidentMemory" : 48496640, + "cpuTotal" : 10002431, + "objectAllocCount" : 0, + "releaseCount" : 0, + "retainCount" : 0, + "retainReleaseDelta" : 0, + "throughput" : 9, + "wallClock" : 111149055 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json new file mode 100644 index 00000000..c887bece --- /dev/null +++ b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json @@ -0,0 +1,10 @@ +{ + "allocatedResidentMemory" : 45678592, + "cpuTotal" : 150000000, + "objectAllocCount" : 0, + "releaseCount" : 0, + "retainCount" : 0, + "retainReleaseDelta" : 0, + "throughput" : 1, + "wallClock" : 802045209 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json new file mode 100644 index 00000000..301210f8 --- /dev/null +++ b/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json @@ -0,0 +1,10 @@ +{ + "allocatedResidentMemory" : 73859072, + "cpuTotal" : 350000000, + "objectAllocCount" : 6963, + "releaseCount" : 37006, + "retainCount" : 26112, + "retainReleaseDelta" : 3936, + "throughput" : 2, + "wallClock" : 631028000 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json new file mode 100644 index 00000000..b89b39c1 --- /dev/null +++ b/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json @@ -0,0 +1,10 @@ +{ + "allocatedResidentMemory" : 81199103, + "cpuTotal" : 40009727, + "objectAllocCount" : 5607, + "releaseCount" : 32719, + "retainCount" : 24767, + "retainReleaseDelta" : 2379, + "throughput" : 9, + "wallClock" : 117309439 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json new file mode 100644 index 00000000..90f357eb --- /dev/null +++ b/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json @@ -0,0 +1,10 @@ +{ + "allocatedResidentMemory" : 46727168, + "cpuTotal" : 20000000, + "objectAllocCount" : 0, + "releaseCount" : 0, + "retainCount" : 0, + "retainReleaseDelta" : 0, + "throughput" : 2, + "wallClock" : 626312334 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json new file mode 100644 index 00000000..d4dc6d79 --- /dev/null +++ b/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json @@ -0,0 +1,10 @@ +{ + "allocatedResidentMemory" : 45973503, + "cpuTotal" : 140000000, + "objectAllocCount" : 0, + "releaseCount" : 0, + "retainCount" : 0, + "retainReleaseDelta" : 0, + "throughput" : 4, + "wallClock" : 311689215 +} \ No newline at end of file From 3255efcd6e1253343e47315ab0ea5eba6d17346d Mon Sep 17 00:00:00 2001 From: BlindSpot <127803250+blindspotbounty@users.noreply.github.com> Date: Mon, 4 Mar 2024 13:13:30 +0200 Subject: [PATCH 17/25] update thresholds to stable metrics only --- .../KafkaConsumerBenchmark.swift | 15 +------- .../Utilities.swift | 38 +++++++++++++++++-- ...umer_basic_consumer_messages_1000.p90.json | 13 +++---- ..._with_offset_commit_messages_1000.p90.json | 13 +++---- ...afka_basic_consumer_messages_1000.p90.json | 7 +--- ..._with_offset_commit_messages_1000.p90.json | 7 +--- ...umer_basic_consumer_messages_1000.p90.json | 13 +++---- ..._with_offset_commit_messages_1000.p90.json | 13 +++---- ...afka_basic_consumer_messages_1000.p90.json | 7 +--- ..._with_offset_commit_messages_1000.p90.json | 7 +--- ...umer_basic_consumer_messages_1000.p90.json | 13 +++---- ..._with_offset_commit_messages_1000.p90.json | 7 +--- ...afka_basic_consumer_messages_1000.p90.json | 7 +--- ..._with_offset_commit_messages_1000.p90.json | 7 +--- ...umer_basic_consumer_messages_1000.p90.json | 13 +++---- ..._with_offset_commit_messages_1000.p90.json | 13 +++---- ...afka_basic_consumer_messages_1000.p90.json | 7 +--- ..._with_offset_commit_messages_1000.p90.json | 7 +--- ...umer_basic_consumer_messages_1000.p90.json | 13 +++---- ..._with_offset_commit_messages_1000.p90.json | 13 +++---- ...afka_basic_consumer_messages_1000.p90.json | 7 +--- ..._with_offset_commit_messages_1000.p90.json | 7 +--- docker/docker-compose.yaml | 6 ++- 23 files changed, 107 insertions(+), 146 deletions(-) diff --git a/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift b/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift index da0ff9a2..1a612412 100644 --- a/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift +++ b/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift @@ -21,8 +21,6 @@ import Kafka import Logging import ServiceLifecycle - - let benchmarks = { var uniqueTestTopic: String! let messageCount: UInt = 1000 @@ -33,18 +31,7 @@ let benchmarks = { scalingFactor: .one, maxDuration: .seconds(5), maxIterations: 100, - thresholds: [ - // Thresholds are wild guess mostly. Have to adjust with time. - .wallClock: .init(relative: [.p90: 10]), - .cpuTotal: .init(relative: [.p90: 10]), - .allocatedResidentMemory: .init(relative: [.p90: 20]), - .contextSwitches: .init(relative: [.p90: 10]), - .throughput: .init(relative: [.p90: 10]), - .objectAllocCount: .init(relative: [.p90: 10]), - .retainCount: .init(relative: [.p90: 10]), - .releaseCount: .init(relative: [.p90: 10]), - .retainReleaseDelta: .init(relative: [.p90: 10]), - ] + thresholds: metricsThreasholds ) Benchmark.setup = { diff --git a/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/Utilities.swift b/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/Utilities.swift index 6518fd9d..af5ff98b 100644 --- a/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/Utilities.swift +++ b/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/Utilities.swift @@ -127,15 +127,45 @@ extension Benchmark { } } -fileprivate let stableBenchmarkMetrics: [BenchmarkMetric] = [ +private let stableBenchmarkMetrics: [BenchmarkMetric] = [ .allocatedResidentMemory, ] + .arc -fileprivate let allMetricsToMeasure: [BenchmarkMetric] = [ +private let allMetricsToMeasure: [BenchmarkMetric] = [ .wallClock, .cpuTotal, .contextSwitches, - .throughput + .throughput, ] + stableBenchmarkMetrics -let metricsToMeasure: [BenchmarkMetric] = (Bool(ProcessInfo.processInfo.environment["KAFKA_USE_STABLE_BENCHMARK_METRICS"] ?? "false") == true) ? stableBenchmarkMetrics : allMetricsToMeasure +private let useStableMetrics = Bool(ProcessInfo.processInfo.environment["KAFKA_USE_STABLE_BENCHMARK_METRICS"] ?? "false") == true + +let metricsToMeasure: [BenchmarkMetric] = + useStableMetrics + ? stableBenchmarkMetrics + : allMetricsToMeasure + +let stableMetricsThreasholds: [BenchmarkMetric: BenchmarkThresholds] = [ + .allocatedResidentMemory: .init(relative: [.p90: 10]), + .objectAllocCount: .init(relative: [.p90: 10]), + .retainCount: .init(relative: [.p90: 10]), + .releaseCount: .init(relative: [.p90: 10]), + .retainReleaseDelta: .init(relative: [.p90: 20]), +] + +let allMetricsThreasholds: [BenchmarkMetric: BenchmarkThresholds] = [ + .wallClock: .init(relative: [.p90: 15]), + .cpuTotal: .init(relative: [.p90: 15]), + .allocatedResidentMemory: .init(relative: [.p90: 10]), + .contextSwitches: .init(relative: [.p90: 15]), + .throughput: .init(relative: [.p90: 15]), + .objectAllocCount: .init(relative: [.p90: 10]), + .retainCount: .init(relative: [.p90: 10]), + .releaseCount: .init(relative: [.p90: 10]), + .retainReleaseDelta: .init(relative: [.p90: 20]), +] + +let metricsThreasholds: [BenchmarkMetric: BenchmarkThresholds] = + useStableMetrics + ? stableMetricsThreasholds + : allMetricsThreasholds diff --git a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json index 0503fc0c..3c09d19a 100644 --- a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json +++ b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json @@ -1,10 +1,7 @@ { - "allocatedResidentMemory" : 70516736, - "cpuTotal" : 290000000, - "objectAllocCount" : 6939, - "releaseCount" : 36944, - "retainCount" : 26088, - "retainReleaseDelta" : 3921, - "throughput" : 2, - "wallClock" : 636803626 + "allocatedResidentMemory" : 72679423, + "objectAllocCount" : 4631, + "releaseCount" : 30798, + "retainCount" : 23775, + "retainReleaseDelta" : 2381 } \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json index 7bde2ec9..a3004c1f 100644 --- a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json +++ b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json @@ -1,10 +1,7 @@ { - "allocatedResidentMemory" : 71761920, - "cpuTotal" : 220000000, - "objectAllocCount" : 7986, - "releaseCount" : 39057, - "retainCount" : 27123, - "retainReleaseDelta" : 3948, - "throughput" : 2, - "wallClock" : 628638500 + "allocatedResidentMemory" : 66650112, + "objectAllocCount" : 7959, + "releaseCount" : 38994, + "retainCount" : 27103, + "retainReleaseDelta" : 3934 } \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json index 1358e408..60b05547 100644 --- a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json +++ b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json @@ -1,10 +1,7 @@ { - "allocatedResidentMemory" : 43384832, - "cpuTotal" : 20000000, + "allocatedResidentMemory" : 47775744, "objectAllocCount" : 0, "releaseCount" : 0, "retainCount" : 0, - "retainReleaseDelta" : 0, - "throughput" : 2, - "wallClock" : 621262042 + "retainReleaseDelta" : 0 } \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json index 8584e8ee..304c7328 100644 --- a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json +++ b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json @@ -1,10 +1,7 @@ { - "allocatedResidentMemory" : 47841280, - "cpuTotal" : 140000000, + "allocatedResidentMemory" : 46006272, "objectAllocCount" : 0, "releaseCount" : 0, "retainCount" : 0, - "retainReleaseDelta" : 0, - "throughput" : 1, - "wallClock" : 809207584 + "retainReleaseDelta" : 0 } \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json index b2e753ba..524ca722 100644 --- a/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json +++ b/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json @@ -1,10 +1,7 @@ { - "allocatedResidentMemory" : 68878336, - "cpuTotal" : 250000000, - "objectAllocCount" : 7223, - "releaseCount" : 37789, - "retainCount" : 26456, - "retainReleaseDelta" : 4110, - "throughput" : 2, - "wallClock" : 678995792 + "allocatedResidentMemory" : 71565312, + "objectAllocCount" : 7062, + "releaseCount" : 37374, + "retainCount" : 26309, + "retainReleaseDelta" : 4003 } \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json index 7a81cb42..1e6c8da4 100644 --- a/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json +++ b/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json @@ -1,10 +1,7 @@ { - "allocatedResidentMemory" : 69402624, - "cpuTotal" : 210000000, - "objectAllocCount" : 8043, - "releaseCount" : 39309, - "retainCount" : 27276, - "retainReleaseDelta" : 3990, - "throughput" : 2, - "wallClock" : 643641375 + "allocatedResidentMemory" : 68157440, + "objectAllocCount" : 7956, + "releaseCount" : 39081, + "retainCount" : 27190, + "retainReleaseDelta" : 3935 } \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json index 76abcc09..2f4db2c7 100644 --- a/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json +++ b/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json @@ -1,10 +1,7 @@ { - "allocatedResidentMemory" : 50069504, - "cpuTotal" : 20000000, + "allocatedResidentMemory" : 48693248, "objectAllocCount" : 0, "releaseCount" : 0, "retainCount" : 0, - "retainReleaseDelta" : 0, - "throughput" : 2, - "wallClock" : 625373958 + "retainReleaseDelta" : 0 } \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json index 97a7c309..c02556e8 100644 --- a/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json +++ b/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json @@ -1,10 +1,7 @@ { - "allocatedResidentMemory" : 51773440, - "cpuTotal" : 140000000, + "allocatedResidentMemory" : 49152000, "objectAllocCount" : 0, "releaseCount" : 0, "retainCount" : 0, - "retainReleaseDelta" : 0, - "throughput" : 3, - "wallClock" : 312475647 + "retainReleaseDelta" : 0 } \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json index 2875624b..782e5b25 100644 --- a/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json +++ b/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json @@ -1,10 +1,7 @@ { - "allocatedResidentMemory" : 70123520, - "cpuTotal" : 230000000, - "objectAllocCount" : 7026, - "releaseCount" : 32263, - "retainCount" : 21255, - "retainReleaseDelta" : 3982, - "throughput" : 2, - "wallClock" : 635520125 + "allocatedResidentMemory" : 65404928, + "objectAllocCount" : 6980, + "releaseCount" : 32127, + "retainCount" : 21198, + "retainReleaseDelta" : 3952 } \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json index a17fd1db..62999938 100644 --- a/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json +++ b/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json @@ -1,10 +1,7 @@ { - "allocatedResidentMemory" : 76808191, - "cpuTotal" : 40009727, + "allocatedResidentMemory" : 74317823, "objectAllocCount" : 5611, "releaseCount" : 27807, "retainCount" : 19839, - "retainReleaseDelta" : 2371, - "throughput" : 9, - "wallClock" : 117243903 + "retainReleaseDelta" : 2373 } \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json index 776ec643..def8eeb0 100644 --- a/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json +++ b/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json @@ -1,10 +1,7 @@ { - "allocatedResidentMemory" : 45219840, - "cpuTotal" : 20000000, + "allocatedResidentMemory" : 42270720, "objectAllocCount" : 0, "releaseCount" : 0, "retainCount" : 0, - "retainReleaseDelta" : 0, - "throughput" : 2, - "wallClock" : 618535750 + "retainReleaseDelta" : 0 } \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json index cbe4bbe3..6d01e39f 100644 --- a/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json +++ b/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json @@ -1,10 +1,7 @@ { - "allocatedResidentMemory" : 49217536, - "cpuTotal" : 160000000, + "allocatedResidentMemory" : 41123839, "objectAllocCount" : 0, "releaseCount" : 0, "retainCount" : 0, - "retainReleaseDelta" : 0, - "throughput" : 1, - "wallClock" : 833637292 + "retainReleaseDelta" : 0 } \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json index cdd2e71a..7b1c657e 100644 --- a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json +++ b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json @@ -1,10 +1,7 @@ { - "allocatedResidentMemory" : 74121216, - "cpuTotal" : 260000000, - "objectAllocCount" : 6983, - "releaseCount" : 32103, - "retainCount" : 21175, - "retainReleaseDelta" : 3951, - "throughput" : 2, - "wallClock" : 632957792 + "allocatedResidentMemory" : 65536000, + "objectAllocCount" : 6977, + "releaseCount" : 32088, + "retainCount" : 21163, + "retainReleaseDelta" : 3948 } \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json index d4d959d3..430c6b12 100644 --- a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json +++ b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json @@ -1,10 +1,7 @@ { - "allocatedResidentMemory" : 75300864, - "cpuTotal" : 220000000, - "objectAllocCount" : 7962, - "releaseCount" : 34047, - "retainCount" : 22148, - "retainReleaseDelta" : 3937, - "throughput" : 2, - "wallClock" : 626389251 + "allocatedResidentMemory" : 70582272, + "objectAllocCount" : 7971, + "releaseCount" : 34075, + "retainCount" : 22158, + "retainReleaseDelta" : 3946 } \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json index 4a7f61fc..98c7f4b0 100644 --- a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json +++ b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json @@ -1,10 +1,7 @@ { - "allocatedResidentMemory" : 48496640, - "cpuTotal" : 10002431, + "allocatedResidentMemory" : 46989312, "objectAllocCount" : 0, "releaseCount" : 0, "retainCount" : 0, - "retainReleaseDelta" : 0, - "throughput" : 9, - "wallClock" : 111149055 + "retainReleaseDelta" : 0 } \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json index c887bece..ea291b80 100644 --- a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json +++ b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json @@ -1,10 +1,7 @@ { - "allocatedResidentMemory" : 45678592, - "cpuTotal" : 150000000, + "allocatedResidentMemory" : 46858240, "objectAllocCount" : 0, "releaseCount" : 0, "retainCount" : 0, - "retainReleaseDelta" : 0, - "throughput" : 1, - "wallClock" : 802045209 + "retainReleaseDelta" : 0 } \ No newline at end of file diff --git a/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json index 301210f8..72395c4e 100644 --- a/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json +++ b/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json @@ -1,10 +1,7 @@ { - "allocatedResidentMemory" : 73859072, - "cpuTotal" : 350000000, - "objectAllocCount" : 6963, - "releaseCount" : 37006, - "retainCount" : 26112, - "retainReleaseDelta" : 3936, - "throughput" : 2, - "wallClock" : 631028000 + "allocatedResidentMemory" : 66977792, + "objectAllocCount" : 7034, + "releaseCount" : 37191, + "retainCount" : 26174, + "retainReleaseDelta" : 3983 } \ No newline at end of file diff --git a/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json index b89b39c1..050304ab 100644 --- a/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json +++ b/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json @@ -1,10 +1,7 @@ { - "allocatedResidentMemory" : 81199103, - "cpuTotal" : 40009727, - "objectAllocCount" : 5607, - "releaseCount" : 32719, - "retainCount" : 24767, - "retainReleaseDelta" : 2379, - "throughput" : 9, - "wallClock" : 117309439 + "allocatedResidentMemory" : 66322432, + "objectAllocCount" : 8002, + "releaseCount" : 39118, + "retainCount" : 27154, + "retainReleaseDelta" : 3962 } \ No newline at end of file diff --git a/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json index 90f357eb..893cd3c3 100644 --- a/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json +++ b/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json @@ -1,10 +1,7 @@ { - "allocatedResidentMemory" : 46727168, - "cpuTotal" : 20000000, + "allocatedResidentMemory" : 45416448, "objectAllocCount" : 0, "releaseCount" : 0, "retainCount" : 0, - "retainReleaseDelta" : 0, - "throughput" : 2, - "wallClock" : 626312334 + "retainReleaseDelta" : 0 } \ No newline at end of file diff --git a/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json index d4dc6d79..9244f104 100644 --- a/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json +++ b/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json @@ -1,10 +1,7 @@ { - "allocatedResidentMemory" : 45973503, - "cpuTotal" : 140000000, + "allocatedResidentMemory" : 46137344, "objectAllocCount" : 0, "releaseCount" : 0, "retainCount" : 0, - "retainReleaseDelta" : 0, - "throughput" : 4, - "wallClock" : 311689215 + "retainReleaseDelta" : 0 } \ No newline at end of file diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml index 2dce8dff..829fc2a5 100644 --- a/docker/docker-compose.yaml +++ b/docker/docker-compose.yaml @@ -52,10 +52,12 @@ services: environment: SWIFT_VERSION: 5.7 KAFKA_HOST: kafka + KAFKA_USE_STABLE_BENCHMARK_METRICS: true command: > /bin/bash -xcl " swift build --build-tests $${SANITIZER_ARG-} && \ - swift $${SWIFT_TEST_VERB-test} $${WARN_AS_ERROR_ARG-} $${SANITIZER_ARG-} $${IMPORT_CHECK_ARG-} $${STRICT_CONCURRENCY_ARG-} + swift $${SWIFT_TEST_VERB-test} $${WARN_AS_ERROR_ARG-} $${SANITIZER_ARG-} $${IMPORT_CHECK_ARG-} $${STRICT_CONCURRENCY_ARG-} && \ + cd Benchmarks && swift package --disable-sandbox benchmark baseline check --check-absolute-path Thresholds/$${SWIFT_VERSION-}/ " benchmark: @@ -63,6 +65,7 @@ services: depends_on: [kafka, runtime-setup] environment: KAFKA_HOST: kafka + KAFKA_USE_STABLE_BENCHMARK_METRICS: true command: > /bin/bash -xcl " cd Benchmarks && swift package --disable-sandbox benchmark @@ -73,6 +76,7 @@ services: depends_on: [kafka, runtime-setup] environment: KAFKA_HOST: kafka + KAFKA_USE_STABLE_BENCHMARK_METRICS: true command: /bin/bash -xcl "cd Benchmarks && swift package --disable-sandbox --scratch-path .build/$${SWIFT_VERSION-}/ --allow-writing-to-package-directory benchmark --format metricP90AbsoluteThresholds --path Thresholds/$${SWIFT_VERSION-}/" # util From d316aacb28148516e120a0ea6f61d304b4cedd35 Mon Sep 17 00:00:00 2001 From: BlindSpot <127803250+blindspotbounty@users.noreply.github.com> Date: Mon, 4 Mar 2024 14:20:19 +0200 Subject: [PATCH 18/25] try use '1' instead of 'true' --- docker/docker-compose.yaml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml index 829fc2a5..37812326 100644 --- a/docker/docker-compose.yaml +++ b/docker/docker-compose.yaml @@ -52,12 +52,12 @@ services: environment: SWIFT_VERSION: 5.7 KAFKA_HOST: kafka - KAFKA_USE_STABLE_BENCHMARK_METRICS: true + KAFKA_USE_STABLE_BENCHMARK_METRICS: 1 command: > /bin/bash -xcl " swift build --build-tests $${SANITIZER_ARG-} && \ swift $${SWIFT_TEST_VERB-test} $${WARN_AS_ERROR_ARG-} $${SANITIZER_ARG-} $${IMPORT_CHECK_ARG-} $${STRICT_CONCURRENCY_ARG-} && \ - cd Benchmarks && swift package --disable-sandbox benchmark baseline check --check-absolute-path Thresholds/$${SWIFT_VERSION-}/ + cd Benchmarks && swift package --disable-sandbox benchmark baseline check --check-absolute-path Thresholds/$${SWIFT_VERSION-}/ --no-progress " benchmark: @@ -65,7 +65,7 @@ services: depends_on: [kafka, runtime-setup] environment: KAFKA_HOST: kafka - KAFKA_USE_STABLE_BENCHMARK_METRICS: true + KAFKA_USE_STABLE_BENCHMARK_METRICS: 1 command: > /bin/bash -xcl " cd Benchmarks && swift package --disable-sandbox benchmark @@ -76,8 +76,8 @@ services: depends_on: [kafka, runtime-setup] environment: KAFKA_HOST: kafka - KAFKA_USE_STABLE_BENCHMARK_METRICS: true - command: /bin/bash -xcl "cd Benchmarks && swift package --disable-sandbox --scratch-path .build/$${SWIFT_VERSION-}/ --allow-writing-to-package-directory benchmark --format metricP90AbsoluteThresholds --path Thresholds/$${SWIFT_VERSION-}/" + KAFKA_USE_STABLE_BENCHMARK_METRICS: 1 + command: /bin/bash -xcl "cd Benchmarks && swift package --disable-sandbox --scratch-path .build/$${SWIFT_VERSION-}/ --allow-writing-to-package-directory benchmark --format metricP90AbsoluteThresholds --path Thresholds/$${SWIFT_VERSION-}/ --no-progress" # util From 4b7bbdd8ead3a1c14ffa0777b0191cc8868e3c05 Mon Sep 17 00:00:00 2001 From: BlindSpot <127803250+blindspotbounty@users.noreply.github.com> Date: Mon, 4 Mar 2024 15:02:28 +0200 Subject: [PATCH 19/25] adjust thresholds to CI results (as temporary measure) --- ...fkaConsumer_basic_consumer_messages_1000.p90.json | 12 ++++++------ ...onsumer_with_offset_commit_messages_1000.p90.json | 12 ++++++------ ....librdkafka_basic_consumer_messages_1000.p90.json | 4 ++-- ...rdkafka_with_offset_commit_messages_1000.p90.json | 4 ++-- ...fkaConsumer_basic_consumer_messages_1000.p90.json | 12 ++++++------ ...onsumer_with_offset_commit_messages_1000.p90.json | 12 ++++++------ ....librdkafka_basic_consumer_messages_1000.p90.json | 4 ++-- ...rdkafka_with_offset_commit_messages_1000.p90.json | 4 ++-- ...fkaConsumer_basic_consumer_messages_1000.p90.json | 12 ++++++------ ...onsumer_with_offset_commit_messages_1000.p90.json | 12 ++++++------ ....librdkafka_basic_consumer_messages_1000.p90.json | 4 ++-- ...rdkafka_with_offset_commit_messages_1000.p90.json | 4 ++-- ...fkaConsumer_basic_consumer_messages_1000.p90.json | 12 ++++++------ ...onsumer_with_offset_commit_messages_1000.p90.json | 4 ++-- ....librdkafka_basic_consumer_messages_1000.p90.json | 4 ++-- ...rdkafka_with_offset_commit_messages_1000.p90.json | 4 ++-- ...fkaConsumer_basic_consumer_messages_1000.p90.json | 12 ++++++------ ...onsumer_with_offset_commit_messages_1000.p90.json | 12 ++++++------ ....librdkafka_basic_consumer_messages_1000.p90.json | 4 ++-- ...rdkafka_with_offset_commit_messages_1000.p90.json | 4 ++-- 20 files changed, 76 insertions(+), 76 deletions(-) diff --git a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json index 3c09d19a..206c0eab 100644 --- a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json +++ b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json @@ -1,7 +1,7 @@ { - "allocatedResidentMemory" : 72679423, - "objectAllocCount" : 4631, - "releaseCount" : 30798, - "retainCount" : 23775, - "retainReleaseDelta" : 2381 -} \ No newline at end of file + "allocatedResidentMemory" : 12536000, + "objectAllocCount" : 4103, + "releaseCount" : 24088, + "retainCount" : 18163, + "retainReleaseDelta" : 2033 +} diff --git a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json index a3004c1f..59d4cc02 100644 --- a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json +++ b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json @@ -1,7 +1,7 @@ { - "allocatedResidentMemory" : 66650112, - "objectAllocCount" : 7959, - "releaseCount" : 38994, - "retainCount" : 27103, - "retainReleaseDelta" : 3934 -} \ No newline at end of file + "allocatedResidentMemory" : 12582272, + "objectAllocCount" : 7971, + "releaseCount" : 34075, + "retainCount" : 22158, + "retainReleaseDelta" : 3946 +} diff --git a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json index 60b05547..1e51ca6b 100644 --- a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json +++ b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json @@ -1,7 +1,7 @@ { - "allocatedResidentMemory" : 47775744, + "allocatedResidentMemory" : 12989312, "objectAllocCount" : 0, "releaseCount" : 0, "retainCount" : 0, "retainReleaseDelta" : 0 -} \ No newline at end of file +} diff --git a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json index 304c7328..d675b133 100644 --- a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json +++ b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json @@ -1,7 +1,7 @@ { - "allocatedResidentMemory" : 46006272, + "allocatedResidentMemory" : 10858240, "objectAllocCount" : 0, "releaseCount" : 0, "retainCount" : 0, "retainReleaseDelta" : 0 -} \ No newline at end of file +} diff --git a/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json index 524ca722..206c0eab 100644 --- a/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json +++ b/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json @@ -1,7 +1,7 @@ { - "allocatedResidentMemory" : 71565312, - "objectAllocCount" : 7062, - "releaseCount" : 37374, - "retainCount" : 26309, - "retainReleaseDelta" : 4003 -} \ No newline at end of file + "allocatedResidentMemory" : 12536000, + "objectAllocCount" : 4103, + "releaseCount" : 24088, + "retainCount" : 18163, + "retainReleaseDelta" : 2033 +} diff --git a/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json index 1e6c8da4..59d4cc02 100644 --- a/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json +++ b/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json @@ -1,7 +1,7 @@ { - "allocatedResidentMemory" : 68157440, - "objectAllocCount" : 7956, - "releaseCount" : 39081, - "retainCount" : 27190, - "retainReleaseDelta" : 3935 -} \ No newline at end of file + "allocatedResidentMemory" : 12582272, + "objectAllocCount" : 7971, + "releaseCount" : 34075, + "retainCount" : 22158, + "retainReleaseDelta" : 3946 +} diff --git a/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json index 2f4db2c7..1e51ca6b 100644 --- a/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json +++ b/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json @@ -1,7 +1,7 @@ { - "allocatedResidentMemory" : 48693248, + "allocatedResidentMemory" : 12989312, "objectAllocCount" : 0, "releaseCount" : 0, "retainCount" : 0, "retainReleaseDelta" : 0 -} \ No newline at end of file +} diff --git a/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json index c02556e8..d675b133 100644 --- a/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json +++ b/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json @@ -1,7 +1,7 @@ { - "allocatedResidentMemory" : 49152000, + "allocatedResidentMemory" : 10858240, "objectAllocCount" : 0, "releaseCount" : 0, "retainCount" : 0, "retainReleaseDelta" : 0 -} \ No newline at end of file +} diff --git a/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json index 782e5b25..206c0eab 100644 --- a/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json +++ b/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json @@ -1,7 +1,7 @@ { - "allocatedResidentMemory" : 65404928, - "objectAllocCount" : 6980, - "releaseCount" : 32127, - "retainCount" : 21198, - "retainReleaseDelta" : 3952 -} \ No newline at end of file + "allocatedResidentMemory" : 12536000, + "objectAllocCount" : 4103, + "releaseCount" : 24088, + "retainCount" : 18163, + "retainReleaseDelta" : 2033 +} diff --git a/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json index 62999938..59d4cc02 100644 --- a/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json +++ b/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json @@ -1,7 +1,7 @@ { - "allocatedResidentMemory" : 74317823, - "objectAllocCount" : 5611, - "releaseCount" : 27807, - "retainCount" : 19839, - "retainReleaseDelta" : 2373 -} \ No newline at end of file + "allocatedResidentMemory" : 12582272, + "objectAllocCount" : 7971, + "releaseCount" : 34075, + "retainCount" : 22158, + "retainReleaseDelta" : 3946 +} diff --git a/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json index def8eeb0..1e51ca6b 100644 --- a/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json +++ b/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json @@ -1,7 +1,7 @@ { - "allocatedResidentMemory" : 42270720, + "allocatedResidentMemory" : 12989312, "objectAllocCount" : 0, "releaseCount" : 0, "retainCount" : 0, "retainReleaseDelta" : 0 -} \ No newline at end of file +} diff --git a/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json index 6d01e39f..d675b133 100644 --- a/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json +++ b/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json @@ -1,7 +1,7 @@ { - "allocatedResidentMemory" : 41123839, + "allocatedResidentMemory" : 10858240, "objectAllocCount" : 0, "releaseCount" : 0, "retainCount" : 0, "retainReleaseDelta" : 0 -} \ No newline at end of file +} diff --git a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json index 7b1c657e..206c0eab 100644 --- a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json +++ b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json @@ -1,7 +1,7 @@ { - "allocatedResidentMemory" : 65536000, - "objectAllocCount" : 6977, - "releaseCount" : 32088, - "retainCount" : 21163, - "retainReleaseDelta" : 3948 -} \ No newline at end of file + "allocatedResidentMemory" : 12536000, + "objectAllocCount" : 4103, + "releaseCount" : 24088, + "retainCount" : 18163, + "retainReleaseDelta" : 2033 +} diff --git a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json index 430c6b12..59d4cc02 100644 --- a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json +++ b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json @@ -1,7 +1,7 @@ { - "allocatedResidentMemory" : 70582272, + "allocatedResidentMemory" : 12582272, "objectAllocCount" : 7971, "releaseCount" : 34075, "retainCount" : 22158, "retainReleaseDelta" : 3946 -} \ No newline at end of file +} diff --git a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json index 98c7f4b0..1e51ca6b 100644 --- a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json +++ b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json @@ -1,7 +1,7 @@ { - "allocatedResidentMemory" : 46989312, + "allocatedResidentMemory" : 12989312, "objectAllocCount" : 0, "releaseCount" : 0, "retainCount" : 0, "retainReleaseDelta" : 0 -} \ No newline at end of file +} diff --git a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json index ea291b80..d675b133 100644 --- a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json +++ b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json @@ -1,7 +1,7 @@ { - "allocatedResidentMemory" : 46858240, + "allocatedResidentMemory" : 10858240, "objectAllocCount" : 0, "releaseCount" : 0, "retainCount" : 0, "retainReleaseDelta" : 0 -} \ No newline at end of file +} diff --git a/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json index 72395c4e..206c0eab 100644 --- a/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json +++ b/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json @@ -1,7 +1,7 @@ { - "allocatedResidentMemory" : 66977792, - "objectAllocCount" : 7034, - "releaseCount" : 37191, - "retainCount" : 26174, - "retainReleaseDelta" : 3983 -} \ No newline at end of file + "allocatedResidentMemory" : 12536000, + "objectAllocCount" : 4103, + "releaseCount" : 24088, + "retainCount" : 18163, + "retainReleaseDelta" : 2033 +} diff --git a/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json index 050304ab..59d4cc02 100644 --- a/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json +++ b/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json @@ -1,7 +1,7 @@ { - "allocatedResidentMemory" : 66322432, - "objectAllocCount" : 8002, - "releaseCount" : 39118, - "retainCount" : 27154, - "retainReleaseDelta" : 3962 -} \ No newline at end of file + "allocatedResidentMemory" : 12582272, + "objectAllocCount" : 7971, + "releaseCount" : 34075, + "retainCount" : 22158, + "retainReleaseDelta" : 3946 +} diff --git a/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json index 893cd3c3..1e51ca6b 100644 --- a/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json +++ b/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json @@ -1,7 +1,7 @@ { - "allocatedResidentMemory" : 45416448, + "allocatedResidentMemory" : 12989312, "objectAllocCount" : 0, "releaseCount" : 0, "retainCount" : 0, "retainReleaseDelta" : 0 -} \ No newline at end of file +} diff --git a/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json index 9244f104..d675b133 100644 --- a/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json +++ b/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json @@ -1,7 +1,7 @@ { - "allocatedResidentMemory" : 46137344, + "allocatedResidentMemory" : 10858240, "objectAllocCount" : 0, "releaseCount" : 0, "retainCount" : 0, "retainReleaseDelta" : 0 -} \ No newline at end of file +} From 860e1d5c0f7e5bd2769d0b31c7ce07eb7fa92306 Mon Sep 17 00:00:00 2001 From: BlindSpot <127803250+blindspotbounty@users.noreply.github.com> Date: Mon, 4 Mar 2024 15:15:08 +0200 Subject: [PATCH 20/25] set 20% threshold.. --- .../Utilities.swift | 24 +++++++++---------- 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/Utilities.swift b/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/Utilities.swift index af5ff98b..033cf085 100644 --- a/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/Utilities.swift +++ b/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/Utilities.swift @@ -146,22 +146,22 @@ let metricsToMeasure: [BenchmarkMetric] = : allMetricsToMeasure let stableMetricsThreasholds: [BenchmarkMetric: BenchmarkThresholds] = [ - .allocatedResidentMemory: .init(relative: [.p90: 10]), - .objectAllocCount: .init(relative: [.p90: 10]), - .retainCount: .init(relative: [.p90: 10]), - .releaseCount: .init(relative: [.p90: 10]), + .allocatedResidentMemory: .init(relative: [.p90: 20]), + .objectAllocCount: .init(relative: [.p90: 20]), + .retainCount: .init(relative: [.p90: 20]), + .releaseCount: .init(relative: [.p90: 20]), .retainReleaseDelta: .init(relative: [.p90: 20]), ] let allMetricsThreasholds: [BenchmarkMetric: BenchmarkThresholds] = [ - .wallClock: .init(relative: [.p90: 15]), - .cpuTotal: .init(relative: [.p90: 15]), - .allocatedResidentMemory: .init(relative: [.p90: 10]), - .contextSwitches: .init(relative: [.p90: 15]), - .throughput: .init(relative: [.p90: 15]), - .objectAllocCount: .init(relative: [.p90: 10]), - .retainCount: .init(relative: [.p90: 10]), - .releaseCount: .init(relative: [.p90: 10]), + .wallClock: .init(relative: [.p90: 35]), + .cpuTotal: .init(relative: [.p90: 35]), + .allocatedResidentMemory: .init(relative: [.p90: 20]), + .contextSwitches: .init(relative: [.p90: 35]), + .throughput: .init(relative: [.p90: 35]), + .objectAllocCount: .init(relative: [.p90: 20]), + .retainCount: .init(relative: [.p90: 20]), + .releaseCount: .init(relative: [.p90: 20]), .retainReleaseDelta: .init(relative: [.p90: 20]), ] From e328b68f3f3ca5b48e8116f74bee559ee05abc1d Mon Sep 17 00:00:00 2001 From: BlindSpot <127803250+blindspotbounty@users.noreply.github.com> Date: Mon, 4 Mar 2024 15:35:53 +0200 Subject: [PATCH 21/25] move arc to unstable metrics --- .../Benchmarks/SwiftKafkaConsumerBenchmarks/Utilities.swift | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/Utilities.swift b/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/Utilities.swift index 033cf085..9cba33e3 100644 --- a/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/Utilities.swift +++ b/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/Utilities.swift @@ -129,14 +129,14 @@ extension Benchmark { private let stableBenchmarkMetrics: [BenchmarkMetric] = [ .allocatedResidentMemory, -] + .arc +] private let allMetricsToMeasure: [BenchmarkMetric] = [ .wallClock, .cpuTotal, .contextSwitches, .throughput, -] + stableBenchmarkMetrics +] + stableBenchmarkMetrics + .arc private let useStableMetrics = Bool(ProcessInfo.processInfo.environment["KAFKA_USE_STABLE_BENCHMARK_METRICS"] ?? "false") == true From 4702bbbef7fe414d463619277c14d825f5ef920e Mon Sep 17 00:00:00 2001 From: BlindSpot <127803250+blindspotbounty@users.noreply.github.com> Date: Mon, 4 Mar 2024 16:24:40 +0200 Subject: [PATCH 22/25] try use 'true' in quotes for CI --- .../Benchmarks/SwiftKafkaConsumerBenchmarks/Utilities.swift | 4 ---- docker/docker-compose.yaml | 6 +++--- 2 files changed, 3 insertions(+), 7 deletions(-) diff --git a/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/Utilities.swift b/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/Utilities.swift index 9cba33e3..7926ebbc 100644 --- a/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/Utilities.swift +++ b/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/Utilities.swift @@ -147,10 +147,6 @@ let metricsToMeasure: [BenchmarkMetric] = let stableMetricsThreasholds: [BenchmarkMetric: BenchmarkThresholds] = [ .allocatedResidentMemory: .init(relative: [.p90: 20]), - .objectAllocCount: .init(relative: [.p90: 20]), - .retainCount: .init(relative: [.p90: 20]), - .releaseCount: .init(relative: [.p90: 20]), - .retainReleaseDelta: .init(relative: [.p90: 20]), ] let allMetricsThreasholds: [BenchmarkMetric: BenchmarkThresholds] = [ diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml index 37812326..345a0272 100644 --- a/docker/docker-compose.yaml +++ b/docker/docker-compose.yaml @@ -52,7 +52,7 @@ services: environment: SWIFT_VERSION: 5.7 KAFKA_HOST: kafka - KAFKA_USE_STABLE_BENCHMARK_METRICS: 1 + KAFKA_USE_STABLE_BENCHMARK_METRICS: "true" command: > /bin/bash -xcl " swift build --build-tests $${SANITIZER_ARG-} && \ @@ -65,7 +65,7 @@ services: depends_on: [kafka, runtime-setup] environment: KAFKA_HOST: kafka - KAFKA_USE_STABLE_BENCHMARK_METRICS: 1 + KAFKA_USE_STABLE_BENCHMARK_METRICS: "true" command: > /bin/bash -xcl " cd Benchmarks && swift package --disable-sandbox benchmark @@ -76,7 +76,7 @@ services: depends_on: [kafka, runtime-setup] environment: KAFKA_HOST: kafka - KAFKA_USE_STABLE_BENCHMARK_METRICS: 1 + KAFKA_USE_STABLE_BENCHMARK_METRICS: "true" command: /bin/bash -xcl "cd Benchmarks && swift package --disable-sandbox --scratch-path .build/$${SWIFT_VERSION-}/ --allow-writing-to-package-directory benchmark --format metricP90AbsoluteThresholds --path Thresholds/$${SWIFT_VERSION-}/ --no-progress" # util From 7d2828bd61d346e79637f8048bd59597900246a6 Mon Sep 17 00:00:00 2001 From: BlindSpot <127803250+blindspotbounty@users.noreply.github.com> Date: Mon, 4 Mar 2024 17:48:02 +0200 Subject: [PATCH 23/25] try reduce number of messages for more reliable results --- .../KafkaConsumerBenchmark.swift | 2 +- ...SwiftKafkaConsumer_basic_consumer_messages_100.p90.json | 3 +++ ...wiftKafkaConsumer_basic_consumer_messages_1000.p90.json | 7 ------- ...tKafkaConsumer_with_offset_commit_messages_100.p90.json | 3 +++ ...KafkaConsumer_with_offset_commit_messages_1000.p90.json | 7 ------- ...chmarks.librdkafka_basic_consumer_messages_100.p90.json | 3 +++ ...hmarks.librdkafka_basic_consumer_messages_1000.p90.json | 7 ------- ...rks.librdkafka_with_offset_commit_messages_100.p90.json | 3 +++ ...ks.librdkafka_with_offset_commit_messages_1000.p90.json | 7 ------- ...SwiftKafkaConsumer_basic_consumer_messages_100.p90.json | 3 +++ ...wiftKafkaConsumer_basic_consumer_messages_1000.p90.json | 7 ------- ...tKafkaConsumer_with_offset_commit_messages_100.p90.json | 3 +++ ...KafkaConsumer_with_offset_commit_messages_1000.p90.json | 7 ------- ...chmarks.librdkafka_basic_consumer_messages_100.p90.json | 3 +++ ...hmarks.librdkafka_basic_consumer_messages_1000.p90.json | 7 ------- ...rks.librdkafka_with_offset_commit_messages_100.p90.json | 3 +++ ...ks.librdkafka_with_offset_commit_messages_1000.p90.json | 7 ------- ...SwiftKafkaConsumer_basic_consumer_messages_100.p90.json | 3 +++ ...wiftKafkaConsumer_basic_consumer_messages_1000.p90.json | 7 ------- ...tKafkaConsumer_with_offset_commit_messages_100.p90.json | 3 +++ ...KafkaConsumer_with_offset_commit_messages_1000.p90.json | 7 ------- ...chmarks.librdkafka_basic_consumer_messages_100.p90.json | 3 +++ ...hmarks.librdkafka_basic_consumer_messages_1000.p90.json | 7 ------- ...rks.librdkafka_with_offset_commit_messages_100.p90.json | 3 +++ ...ks.librdkafka_with_offset_commit_messages_1000.p90.json | 7 ------- ...SwiftKafkaConsumer_basic_consumer_messages_100.p90.json | 3 +++ ...wiftKafkaConsumer_basic_consumer_messages_1000.p90.json | 7 ------- ...tKafkaConsumer_with_offset_commit_messages_100.p90.json | 3 +++ ...KafkaConsumer_with_offset_commit_messages_1000.p90.json | 7 ------- ...chmarks.librdkafka_basic_consumer_messages_100.p90.json | 3 +++ ...hmarks.librdkafka_basic_consumer_messages_1000.p90.json | 7 ------- ...rks.librdkafka_with_offset_commit_messages_100.p90.json | 3 +++ ...ks.librdkafka_with_offset_commit_messages_1000.p90.json | 7 ------- ...SwiftKafkaConsumer_basic_consumer_messages_100.p90.json | 3 +++ ...wiftKafkaConsumer_basic_consumer_messages_1000.p90.json | 7 ------- ...tKafkaConsumer_with_offset_commit_messages_100.p90.json | 3 +++ ...KafkaConsumer_with_offset_commit_messages_1000.p90.json | 7 ------- ...chmarks.librdkafka_basic_consumer_messages_100.p90.json | 3 +++ ...hmarks.librdkafka_basic_consumer_messages_1000.p90.json | 7 ------- ...rks.librdkafka_with_offset_commit_messages_100.p90.json | 3 +++ ...ks.librdkafka_with_offset_commit_messages_1000.p90.json | 7 ------- 41 files changed, 61 insertions(+), 141 deletions(-) create mode 100644 Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_100.p90.json delete mode 100644 Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_100.p90.json delete mode 100644 Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_100.p90.json delete mode 100644 Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_100.p90.json delete mode 100644 Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_100.p90.json delete mode 100644 Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_100.p90.json delete mode 100644 Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_100.p90.json delete mode 100644 Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_100.p90.json delete mode 100644 Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_100.p90.json delete mode 100644 Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_100.p90.json delete mode 100644 Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_100.p90.json delete mode 100644 Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_100.p90.json delete mode 100644 Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_100.p90.json delete mode 100644 Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_100.p90.json delete mode 100644 Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_100.p90.json delete mode 100644 Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_100.p90.json delete mode 100644 Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_100.p90.json delete mode 100644 Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_100.p90.json delete mode 100644 Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_100.p90.json delete mode 100644 Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_100.p90.json delete mode 100644 Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json diff --git a/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift b/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift index 1a612412..4167493e 100644 --- a/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift +++ b/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift @@ -23,7 +23,7 @@ import ServiceLifecycle let benchmarks = { var uniqueTestTopic: String! - let messageCount: UInt = 1000 + let messageCount: UInt = 100 Benchmark.defaultConfiguration = .init( metrics: metricsToMeasure, diff --git a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_100.p90.json b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_100.p90.json new file mode 100644 index 00000000..2c863d69 --- /dev/null +++ b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_100.p90.json @@ -0,0 +1,3 @@ +{ + "allocatedResidentMemory" : 66781184 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json deleted file mode 100644 index 206c0eab..00000000 --- a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "allocatedResidentMemory" : 12536000, - "objectAllocCount" : 4103, - "releaseCount" : 24088, - "retainCount" : 18163, - "retainReleaseDelta" : 2033 -} diff --git a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_100.p90.json b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_100.p90.json new file mode 100644 index 00000000..7a63f00b --- /dev/null +++ b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_100.p90.json @@ -0,0 +1,3 @@ +{ + "allocatedResidentMemory" : 61341696 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json deleted file mode 100644 index 59d4cc02..00000000 --- a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "allocatedResidentMemory" : 12582272, - "objectAllocCount" : 7971, - "releaseCount" : 34075, - "retainCount" : 22158, - "retainReleaseDelta" : 3946 -} diff --git a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_100.p90.json b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_100.p90.json new file mode 100644 index 00000000..720afd32 --- /dev/null +++ b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_100.p90.json @@ -0,0 +1,3 @@ +{ + "allocatedResidentMemory" : 41811968 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json deleted file mode 100644 index 1e51ca6b..00000000 --- a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "allocatedResidentMemory" : 12989312, - "objectAllocCount" : 0, - "releaseCount" : 0, - "retainCount" : 0, - "retainReleaseDelta" : 0 -} diff --git a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_100.p90.json b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_100.p90.json new file mode 100644 index 00000000..a4b30205 --- /dev/null +++ b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_100.p90.json @@ -0,0 +1,3 @@ +{ + "allocatedResidentMemory" : 43778048 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json deleted file mode 100644 index d675b133..00000000 --- a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "allocatedResidentMemory" : 10858240, - "objectAllocCount" : 0, - "releaseCount" : 0, - "retainCount" : 0, - "retainReleaseDelta" : 0 -} diff --git a/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_100.p90.json b/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_100.p90.json new file mode 100644 index 00000000..eebce777 --- /dev/null +++ b/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_100.p90.json @@ -0,0 +1,3 @@ +{ + "allocatedResidentMemory" : 64946176 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json deleted file mode 100644 index 206c0eab..00000000 --- a/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "allocatedResidentMemory" : 12536000, - "objectAllocCount" : 4103, - "releaseCount" : 24088, - "retainCount" : 18163, - "retainReleaseDelta" : 2033 -} diff --git a/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_100.p90.json b/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_100.p90.json new file mode 100644 index 00000000..f98ef069 --- /dev/null +++ b/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_100.p90.json @@ -0,0 +1,3 @@ +{ + "allocatedResidentMemory" : 72089599 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json deleted file mode 100644 index 59d4cc02..00000000 --- a/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "allocatedResidentMemory" : 12582272, - "objectAllocCount" : 7971, - "releaseCount" : 34075, - "retainCount" : 22158, - "retainReleaseDelta" : 3946 -} diff --git a/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_100.p90.json b/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_100.p90.json new file mode 100644 index 00000000..630821f4 --- /dev/null +++ b/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_100.p90.json @@ -0,0 +1,3 @@ +{ + "allocatedResidentMemory" : 42729472 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json deleted file mode 100644 index 1e51ca6b..00000000 --- a/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "allocatedResidentMemory" : 12989312, - "objectAllocCount" : 0, - "releaseCount" : 0, - "retainCount" : 0, - "retainReleaseDelta" : 0 -} diff --git a/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_100.p90.json b/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_100.p90.json new file mode 100644 index 00000000..add0331e --- /dev/null +++ b/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_100.p90.json @@ -0,0 +1,3 @@ +{ + "allocatedResidentMemory" : 40173568 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json deleted file mode 100644 index d675b133..00000000 --- a/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "allocatedResidentMemory" : 10858240, - "objectAllocCount" : 0, - "releaseCount" : 0, - "retainCount" : 0, - "retainReleaseDelta" : 0 -} diff --git a/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_100.p90.json b/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_100.p90.json new file mode 100644 index 00000000..4c0acb55 --- /dev/null +++ b/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_100.p90.json @@ -0,0 +1,3 @@ +{ + "allocatedResidentMemory" : 65142784 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json deleted file mode 100644 index 206c0eab..00000000 --- a/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "allocatedResidentMemory" : 12536000, - "objectAllocCount" : 4103, - "releaseCount" : 24088, - "retainCount" : 18163, - "retainReleaseDelta" : 2033 -} diff --git a/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_100.p90.json b/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_100.p90.json new file mode 100644 index 00000000..2c863d69 --- /dev/null +++ b/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_100.p90.json @@ -0,0 +1,3 @@ +{ + "allocatedResidentMemory" : 66781184 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json deleted file mode 100644 index 59d4cc02..00000000 --- a/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "allocatedResidentMemory" : 12582272, - "objectAllocCount" : 7971, - "releaseCount" : 34075, - "retainCount" : 22158, - "retainReleaseDelta" : 3946 -} diff --git a/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_100.p90.json b/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_100.p90.json new file mode 100644 index 00000000..000b1ed4 --- /dev/null +++ b/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_100.p90.json @@ -0,0 +1,3 @@ +{ + "allocatedResidentMemory" : 42172415 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json deleted file mode 100644 index 1e51ca6b..00000000 --- a/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "allocatedResidentMemory" : 12989312, - "objectAllocCount" : 0, - "releaseCount" : 0, - "retainCount" : 0, - "retainReleaseDelta" : 0 -} diff --git a/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_100.p90.json b/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_100.p90.json new file mode 100644 index 00000000..fc97b516 --- /dev/null +++ b/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_100.p90.json @@ -0,0 +1,3 @@ +{ + "allocatedResidentMemory" : 43384832 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json deleted file mode 100644 index d675b133..00000000 --- a/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "allocatedResidentMemory" : 10858240, - "objectAllocCount" : 0, - "releaseCount" : 0, - "retainCount" : 0, - "retainReleaseDelta" : 0 -} diff --git a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_100.p90.json b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_100.p90.json new file mode 100644 index 00000000..341ea368 --- /dev/null +++ b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_100.p90.json @@ -0,0 +1,3 @@ +{ + "allocatedResidentMemory" : 64880640 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json deleted file mode 100644 index 206c0eab..00000000 --- a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "allocatedResidentMemory" : 12536000, - "objectAllocCount" : 4103, - "releaseCount" : 24088, - "retainCount" : 18163, - "retainReleaseDelta" : 2033 -} diff --git a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_100.p90.json b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_100.p90.json new file mode 100644 index 00000000..1736e180 --- /dev/null +++ b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_100.p90.json @@ -0,0 +1,3 @@ +{ + "allocatedResidentMemory" : 65798144 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json deleted file mode 100644 index 59d4cc02..00000000 --- a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "allocatedResidentMemory" : 12582272, - "objectAllocCount" : 7971, - "releaseCount" : 34075, - "retainCount" : 22158, - "retainReleaseDelta" : 3946 -} diff --git a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_100.p90.json b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_100.p90.json new file mode 100644 index 00000000..8f155e09 --- /dev/null +++ b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_100.p90.json @@ -0,0 +1,3 @@ +{ + "allocatedResidentMemory" : 40042496 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json deleted file mode 100644 index 1e51ca6b..00000000 --- a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "allocatedResidentMemory" : 12989312, - "objectAllocCount" : 0, - "releaseCount" : 0, - "retainCount" : 0, - "retainReleaseDelta" : 0 -} diff --git a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_100.p90.json b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_100.p90.json new file mode 100644 index 00000000..8bd4e137 --- /dev/null +++ b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_100.p90.json @@ -0,0 +1,3 @@ +{ + "allocatedResidentMemory" : 41091072 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json deleted file mode 100644 index d675b133..00000000 --- a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "allocatedResidentMemory" : 10858240, - "objectAllocCount" : 0, - "releaseCount" : 0, - "retainCount" : 0, - "retainReleaseDelta" : 0 -} diff --git a/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_100.p90.json b/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_100.p90.json new file mode 100644 index 00000000..e3da3db0 --- /dev/null +++ b/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_100.p90.json @@ -0,0 +1,3 @@ +{ + "allocatedResidentMemory" : 73203712 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json deleted file mode 100644 index 206c0eab..00000000 --- a/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "allocatedResidentMemory" : 12536000, - "objectAllocCount" : 4103, - "releaseCount" : 24088, - "retainCount" : 18163, - "retainReleaseDelta" : 2033 -} diff --git a/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_100.p90.json b/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_100.p90.json new file mode 100644 index 00000000..e2748391 --- /dev/null +++ b/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_100.p90.json @@ -0,0 +1,3 @@ +{ + "allocatedResidentMemory" : 64290816 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json deleted file mode 100644 index 59d4cc02..00000000 --- a/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "allocatedResidentMemory" : 12582272, - "objectAllocCount" : 7971, - "releaseCount" : 34075, - "retainCount" : 22158, - "retainReleaseDelta" : 3946 -} diff --git a/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_100.p90.json b/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_100.p90.json new file mode 100644 index 00000000..7269fcc7 --- /dev/null +++ b/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_100.p90.json @@ -0,0 +1,3 @@ +{ + "allocatedResidentMemory" : 41680896 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json deleted file mode 100644 index 1e51ca6b..00000000 --- a/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "allocatedResidentMemory" : 12989312, - "objectAllocCount" : 0, - "releaseCount" : 0, - "retainCount" : 0, - "retainReleaseDelta" : 0 -} diff --git a/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_100.p90.json b/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_100.p90.json new file mode 100644 index 00000000..7c1f7895 --- /dev/null +++ b/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_100.p90.json @@ -0,0 +1,3 @@ +{ + "allocatedResidentMemory" : 42532864 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json deleted file mode 100644 index d675b133..00000000 --- a/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "allocatedResidentMemory" : 10858240, - "objectAllocCount" : 0, - "releaseCount" : 0, - "retainCount" : 0, - "retainReleaseDelta" : 0 -} From fedbeff1d281dd8800b6551d619f39b0de7febcb Mon Sep 17 00:00:00 2001 From: BlindSpot <127803250+blindspotbounty@users.noreply.github.com> Date: Mon, 4 Mar 2024 17:59:52 +0200 Subject: [PATCH 24/25] try upgrade bench --- Benchmarks/Package.swift | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Benchmarks/Package.swift b/Benchmarks/Package.swift index e27d3106..4ea81f8c 100644 --- a/Benchmarks/Package.swift +++ b/Benchmarks/Package.swift @@ -22,7 +22,7 @@ let package = Package( ], dependencies: [ .package(path: "../"), - .package(url: "https://github.com/ordo-one/package-benchmark.git", from: "1.11.1"), + .package(url: "https://github.com/ordo-one/package-benchmark.git", from: "1.22.3"), ], targets: [ .executableTarget( From 2799c02056accb77e5501b9bc1789fec38ed983f Mon Sep 17 00:00:00 2001 From: BlindSpot <127803250+blindspotbounty@users.noreply.github.com> Date: Mon, 4 Mar 2024 18:11:34 +0200 Subject: [PATCH 25/25] disable benchmark in CI --- .../KafkaConsumerBenchmark.swift | 22 +++++++++-- .../Utilities.swift | 39 ------------------- ...sumer_basic_consumer_messages_100.p90.json | 3 -- ...r_with_offset_commit_messages_100.p90.json | 3 -- ...kafka_basic_consumer_messages_100.p90.json | 3 -- ...a_with_offset_commit_messages_100.p90.json | 3 -- ...sumer_basic_consumer_messages_100.p90.json | 3 -- ...r_with_offset_commit_messages_100.p90.json | 3 -- ...kafka_basic_consumer_messages_100.p90.json | 3 -- ...a_with_offset_commit_messages_100.p90.json | 3 -- ...sumer_basic_consumer_messages_100.p90.json | 3 -- ...r_with_offset_commit_messages_100.p90.json | 3 -- ...kafka_basic_consumer_messages_100.p90.json | 3 -- ...a_with_offset_commit_messages_100.p90.json | 3 -- ...sumer_basic_consumer_messages_100.p90.json | 3 -- ...r_with_offset_commit_messages_100.p90.json | 3 -- ...kafka_basic_consumer_messages_100.p90.json | 3 -- ...a_with_offset_commit_messages_100.p90.json | 3 -- ...sumer_basic_consumer_messages_100.p90.json | 3 -- ...r_with_offset_commit_messages_100.p90.json | 3 -- ...kafka_basic_consumer_messages_100.p90.json | 3 -- ...a_with_offset_commit_messages_100.p90.json | 3 -- docker/docker-compose.yaml | 6 +-- 23 files changed, 20 insertions(+), 107 deletions(-) delete mode 100644 Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_100.p90.json delete mode 100644 Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_100.p90.json delete mode 100644 Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_100.p90.json delete mode 100644 Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_100.p90.json delete mode 100644 Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_100.p90.json delete mode 100644 Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_100.p90.json delete mode 100644 Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_100.p90.json delete mode 100644 Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_100.p90.json delete mode 100644 Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_100.p90.json delete mode 100644 Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_100.p90.json delete mode 100644 Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_100.p90.json delete mode 100644 Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_100.p90.json delete mode 100644 Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_100.p90.json delete mode 100644 Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_100.p90.json delete mode 100644 Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_100.p90.json delete mode 100644 Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_100.p90.json delete mode 100644 Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_100.p90.json delete mode 100644 Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_100.p90.json delete mode 100644 Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_100.p90.json delete mode 100644 Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_100.p90.json diff --git a/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift b/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift index 4167493e..ec0bff9a 100644 --- a/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift +++ b/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift @@ -23,15 +23,31 @@ import ServiceLifecycle let benchmarks = { var uniqueTestTopic: String! - let messageCount: UInt = 100 + let messageCount: UInt = 1000 Benchmark.defaultConfiguration = .init( - metrics: metricsToMeasure, + metrics: [ + .wallClock, + .cpuTotal, + .contextSwitches, + .throughput, + .allocatedResidentMemory, + ] + .arc, warmupIterations: 0, scalingFactor: .one, maxDuration: .seconds(5), maxIterations: 100, - thresholds: metricsThreasholds + thresholds: [ + .wallClock: .init(relative: [.p90: 35]), + .cpuTotal: .init(relative: [.p90: 35]), + .allocatedResidentMemory: .init(relative: [.p90: 20]), + .contextSwitches: .init(relative: [.p90: 35]), + .throughput: .init(relative: [.p90: 35]), + .objectAllocCount: .init(relative: [.p90: 20]), + .retainCount: .init(relative: [.p90: 20]), + .releaseCount: .init(relative: [.p90: 20]), + .retainReleaseDelta: .init(relative: [.p90: 20]), + ] ) Benchmark.setup = { diff --git a/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/Utilities.swift b/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/Utilities.swift index 7926ebbc..304dc1fb 100644 --- a/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/Utilities.swift +++ b/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/Utilities.swift @@ -126,42 +126,3 @@ extension Benchmark { return try await body() } } - -private let stableBenchmarkMetrics: [BenchmarkMetric] = [ - .allocatedResidentMemory, -] - -private let allMetricsToMeasure: [BenchmarkMetric] = [ - .wallClock, - .cpuTotal, - .contextSwitches, - .throughput, -] + stableBenchmarkMetrics + .arc - -private let useStableMetrics = Bool(ProcessInfo.processInfo.environment["KAFKA_USE_STABLE_BENCHMARK_METRICS"] ?? "false") == true - -let metricsToMeasure: [BenchmarkMetric] = - useStableMetrics - ? stableBenchmarkMetrics - : allMetricsToMeasure - -let stableMetricsThreasholds: [BenchmarkMetric: BenchmarkThresholds] = [ - .allocatedResidentMemory: .init(relative: [.p90: 20]), -] - -let allMetricsThreasholds: [BenchmarkMetric: BenchmarkThresholds] = [ - .wallClock: .init(relative: [.p90: 35]), - .cpuTotal: .init(relative: [.p90: 35]), - .allocatedResidentMemory: .init(relative: [.p90: 20]), - .contextSwitches: .init(relative: [.p90: 35]), - .throughput: .init(relative: [.p90: 35]), - .objectAllocCount: .init(relative: [.p90: 20]), - .retainCount: .init(relative: [.p90: 20]), - .releaseCount: .init(relative: [.p90: 20]), - .retainReleaseDelta: .init(relative: [.p90: 20]), -] - -let metricsThreasholds: [BenchmarkMetric: BenchmarkThresholds] = - useStableMetrics - ? stableMetricsThreasholds - : allMetricsThreasholds diff --git a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_100.p90.json b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_100.p90.json deleted file mode 100644 index 2c863d69..00000000 --- a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_100.p90.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "allocatedResidentMemory" : 66781184 -} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_100.p90.json b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_100.p90.json deleted file mode 100644 index 7a63f00b..00000000 --- a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_100.p90.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "allocatedResidentMemory" : 61341696 -} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_100.p90.json b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_100.p90.json deleted file mode 100644 index 720afd32..00000000 --- a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_100.p90.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "allocatedResidentMemory" : 41811968 -} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_100.p90.json b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_100.p90.json deleted file mode 100644 index a4b30205..00000000 --- a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_100.p90.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "allocatedResidentMemory" : 43778048 -} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_100.p90.json b/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_100.p90.json deleted file mode 100644 index eebce777..00000000 --- a/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_100.p90.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "allocatedResidentMemory" : 64946176 -} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_100.p90.json b/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_100.p90.json deleted file mode 100644 index f98ef069..00000000 --- a/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_100.p90.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "allocatedResidentMemory" : 72089599 -} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_100.p90.json b/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_100.p90.json deleted file mode 100644 index 630821f4..00000000 --- a/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_100.p90.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "allocatedResidentMemory" : 42729472 -} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_100.p90.json b/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_100.p90.json deleted file mode 100644 index add0331e..00000000 --- a/Benchmarks/Thresholds/5.7/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_100.p90.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "allocatedResidentMemory" : 40173568 -} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_100.p90.json b/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_100.p90.json deleted file mode 100644 index 4c0acb55..00000000 --- a/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_100.p90.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "allocatedResidentMemory" : 65142784 -} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_100.p90.json b/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_100.p90.json deleted file mode 100644 index 2c863d69..00000000 --- a/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_100.p90.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "allocatedResidentMemory" : 66781184 -} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_100.p90.json b/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_100.p90.json deleted file mode 100644 index 000b1ed4..00000000 --- a/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_100.p90.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "allocatedResidentMemory" : 42172415 -} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_100.p90.json b/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_100.p90.json deleted file mode 100644 index fc97b516..00000000 --- a/Benchmarks/Thresholds/5.8/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_100.p90.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "allocatedResidentMemory" : 43384832 -} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_100.p90.json b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_100.p90.json deleted file mode 100644 index 341ea368..00000000 --- a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_100.p90.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "allocatedResidentMemory" : 64880640 -} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_100.p90.json b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_100.p90.json deleted file mode 100644 index 1736e180..00000000 --- a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_100.p90.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "allocatedResidentMemory" : 65798144 -} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_100.p90.json b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_100.p90.json deleted file mode 100644 index 8f155e09..00000000 --- a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_100.p90.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "allocatedResidentMemory" : 40042496 -} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_100.p90.json b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_100.p90.json deleted file mode 100644 index 8bd4e137..00000000 --- a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_100.p90.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "allocatedResidentMemory" : 41091072 -} \ No newline at end of file diff --git a/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_100.p90.json b/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_100.p90.json deleted file mode 100644 index e3da3db0..00000000 --- a/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_100.p90.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "allocatedResidentMemory" : 73203712 -} \ No newline at end of file diff --git a/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_100.p90.json b/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_100.p90.json deleted file mode 100644 index e2748391..00000000 --- a/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_100.p90.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "allocatedResidentMemory" : 64290816 -} \ No newline at end of file diff --git a/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_100.p90.json b/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_100.p90.json deleted file mode 100644 index 7269fcc7..00000000 --- a/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_100.p90.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "allocatedResidentMemory" : 41680896 -} \ No newline at end of file diff --git a/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_100.p90.json b/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_100.p90.json deleted file mode 100644 index 7c1f7895..00000000 --- a/Benchmarks/Thresholds/main/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_100.p90.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "allocatedResidentMemory" : 42532864 -} \ No newline at end of file diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml index 345a0272..10f1665c 100644 --- a/docker/docker-compose.yaml +++ b/docker/docker-compose.yaml @@ -52,12 +52,10 @@ services: environment: SWIFT_VERSION: 5.7 KAFKA_HOST: kafka - KAFKA_USE_STABLE_BENCHMARK_METRICS: "true" command: > /bin/bash -xcl " swift build --build-tests $${SANITIZER_ARG-} && \ - swift $${SWIFT_TEST_VERB-test} $${WARN_AS_ERROR_ARG-} $${SANITIZER_ARG-} $${IMPORT_CHECK_ARG-} $${STRICT_CONCURRENCY_ARG-} && \ - cd Benchmarks && swift package --disable-sandbox benchmark baseline check --check-absolute-path Thresholds/$${SWIFT_VERSION-}/ --no-progress + swift $${SWIFT_TEST_VERB-test} $${WARN_AS_ERROR_ARG-} $${SANITIZER_ARG-} $${IMPORT_CHECK_ARG-} $${STRICT_CONCURRENCY_ARG-} " benchmark: @@ -65,7 +63,6 @@ services: depends_on: [kafka, runtime-setup] environment: KAFKA_HOST: kafka - KAFKA_USE_STABLE_BENCHMARK_METRICS: "true" command: > /bin/bash -xcl " cd Benchmarks && swift package --disable-sandbox benchmark @@ -76,7 +73,6 @@ services: depends_on: [kafka, runtime-setup] environment: KAFKA_HOST: kafka - KAFKA_USE_STABLE_BENCHMARK_METRICS: "true" command: /bin/bash -xcl "cd Benchmarks && swift package --disable-sandbox --scratch-path .build/$${SWIFT_VERSION-}/ --allow-writing-to-package-directory benchmark --format metricP90AbsoluteThresholds --path Thresholds/$${SWIFT_VERSION-}/ --no-progress" # util