Skip to content

Commit

Permalink
Lincheck statistics gathering and benchmarks infra
Browse files Browse the repository at this point in the history
Signed-off-by: Evgeniy Moiseenko <[email protected]>
  • Loading branch information
eupp committed Jun 17, 2024
1 parent 8c3ed5d commit 2b8d4fa
Show file tree
Hide file tree
Showing 23 changed files with 1,637 additions and 32 deletions.
73 changes: 73 additions & 0 deletions build.gradle.kts
Original file line number Diff line number Diff line change
Expand Up @@ -5,11 +5,14 @@ import org.gradle.jvm.tasks.Jar
// atomicfu
buildscript {
val atomicfuVersion: String by project
val serializationPluginVersion: String by project
dependencies {
classpath("org.jetbrains.kotlinx:atomicfu-gradle-plugin:$atomicfuVersion")
classpath("org.jetbrains.kotlin:kotlin-serialization:$serializationPluginVersion")
}
}
apply(plugin = "kotlinx-atomicfu")
apply(plugin = "kotlinx-serialization")

plugins {
java
Expand All @@ -23,6 +26,9 @@ repositories {
}

kotlin {
// we have to create custom sourceSets in advance before defining corresponding compilation targets
sourceSets.create("jvmBenchmark")

jvm {
withJava()

Expand All @@ -33,6 +39,51 @@ kotlin {
val test by compilations.getting {
kotlinOptions.jvmTarget = "11"
}

val benchmark by compilations.creating {
kotlinOptions.jvmTarget = "11"

defaultSourceSet {
dependencies {
implementation(main.compileDependencyFiles + main.output.classesDirs)
}
}

val benchmarksClassPath =
compileDependencyFiles +
runtimeDependencyFiles +
output.allOutputs +
files("$buildDir/processedResources/jvm/main")

val benchmarksTestClassesDirs = output.classesDirs

// task allowing to run benchmarks using JUnit API
val benchmark = tasks.register<Test>("jvmBenchmark") {
classpath = benchmarksClassPath
testClassesDirs = benchmarksTestClassesDirs
dependsOn("processResources")
}

// task aggregating all benchmarks into a single suite and producing custom reports
val benchmarkSuite = tasks.register<Test>("jvmBenchmarkSuite") {
classpath = benchmarksClassPath
testClassesDirs = benchmarksTestClassesDirs
filter {
includeTestsMatching("LincheckBenchmarkSuite")
}
// pass the properties
systemProperty("statisticsGranularity", System.getProperty("statisticsGranularity"))
// always re-run test suite
outputs.upToDateWhen { false }
dependsOn("processResources")
}

// task producing plots given the benchmarks report file
val benchmarkPlots by tasks.register<JavaExec>("runBenchmarkPlots") {
classpath = benchmarksClassPath
mainClass.set("org.jetbrains.kotlinx.lincheck_benchmark.PlotsKt")
}
}
}

sourceSets {
Expand Down Expand Up @@ -69,6 +120,28 @@ kotlin {
implementation("io.mockk:mockk:${mockkVersion}")
}
}

val jvmBenchmark by getting {
kotlin.srcDirs("src/jvm/benchmark")

val junitVersion: String by project
val jctoolsVersion: String by project
val serializationVersion: String by project
val letsPlotVersion: String by project
val letsPlotKotlinVersion: String by project
val cliktVersion: String by project
dependencies {
implementation(project(":bootstrap"))
implementation("junit:junit:$junitVersion")
implementation("org.jctools:jctools-core:$jctoolsVersion")
implementation("org.jetbrains.kotlinx:kotlinx-serialization-json:$serializationVersion")
implementation("org.jetbrains.lets-plot:lets-plot-common:$letsPlotVersion")
implementation("org.jetbrains.lets-plot:lets-plot-kotlin-jvm:$letsPlotKotlinVersion")
implementation("com.github.ajalt.clikt:clikt:$cliktVersion")
}
}

// jvmBenchmark.dependsOn(jvmMain)
}
}

Expand Down
6 changes: 6 additions & 0 deletions gradle.properties
Original file line number Diff line number Diff line change
Expand Up @@ -22,9 +22,15 @@ withEventIdSequentialCheck=false

kotlinVersion=1.9.21
kotlinxCoroutinesVersion=1.7.3

asmVersion=9.6
atomicfuVersion=0.20.2
byteBuddyVersion=1.14.12
serializationPluginVersion=1.6.21
serializationVersion=1.3.3
letsPlotVersion=2.5.0
letsPlotKotlinVersion=4.0.0
cliktVersion=3.4.0

junitVersion=4.13.1
jctoolsVersion=3.3.0
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,82 @@
/*
* Lincheck
*
* Copyright (C) 2019 - 2023 JetBrains s.r.o.
*
* This Source Code Form is subject to the terms of the
* Mozilla Public License, v. 2.0. If a copy of the MPL was not distributed
* with this file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/

@file:Suppress("INVISIBLE_REFERENCE", "INVISIBLE_MEMBER")

package org.jetbrains.kotlinx.lincheck_benchmark

import org.jetbrains.kotlinx.lincheck.*
import org.jetbrains.kotlinx.lincheck.strategy.*
import org.jetbrains.kotlinx.lincheck.strategy.managed.modelchecking.ModelCheckingOptions
import org.jetbrains.kotlinx.lincheck.strategy.stress.StressOptions
import kotlin.reflect.KClass
import org.junit.Test


/**
 * Base class for Lincheck benchmarks.
 *
 * Each concrete benchmark is executed twice — once under the stress strategy and once
 * under the model-checking strategy. Every run collects statistics through a
 * [LincheckStatisticsTracker] and registers the resulting [BenchmarkStatistics]
 * in the shared [benchmarksReporter].
 *
 * @param expectedFailures the failure classes this benchmark is expected to produce;
 *   when empty, the benchmark is expected to complete without a failure.
 */
abstract class AbstractLincheckBenchmark(
    private vararg val expectedFailures: KClass<out LincheckFailure>
) {

    @Test(timeout = TIMEOUT)
    fun benchmarkWithStressStrategy(): Unit = StressOptions().run {
        invocationsPerIteration(5_000)
        configure()
        runTest()
    }

    @Test(timeout = TIMEOUT)
    fun benchmarkWithModelCheckingStrategy(): Unit = ModelCheckingOptions().run {
        invocationsPerIteration(5_000)
        configure()
        runTest()
    }

    /**
     * Runs Lincheck with the receiver options, validates the outcome against
     * [expectedFailures], and reports the gathered statistics.
     */
    private fun <O : Options<O, *>> O.runTest() {
        val statisticsTracker = LincheckStatisticsTracker(
            granularity = benchmarksReporter.granularity
        )
        val klass = this@AbstractLincheckBenchmark::class
        val checker = LinChecker(klass.java, this)
        val failure = checker.checkImpl(customTracker = statisticsTracker)
        if (failure == null) {
            assert(expectedFailures.isEmpty()) {
                "This test should fail, but no error has occurred (see the logs for details)"
            }
        } else {
            assert(expectedFailures.contains(failure::class)) {
                "This test has failed with an unexpected error: \n $failure"
            }
        }
        val statistics = statisticsTracker.toBenchmarkStatistics(
            name = klass.simpleName!!.removeSuffix("Benchmark"),
            strategy = when (this) {
                is StressOptions -> LincheckStrategy.Stress
                is ModelCheckingOptions -> LincheckStrategy.ModelChecking
                else -> error("Unsupported Lincheck strategy")
            }
        )
        benchmarksReporter.registerBenchmark(statistics)
    }

    // Common configuration applied to both strategies; subclasses adjust it via [customize].
    private fun <O : Options<O, *>> O.configure(): Unit = run {
        iterations(30)
        threads(3)
        actorsPerThread(2)
        actorsBefore(2)
        actorsAfter(2)
        minimizeFailedScenario(false)
        customize()
    }

    /** Hook allowing subclasses to tweak the default benchmark options. */
    internal open fun <O : Options<O, *>> O.customize() {}

}

private const val TIMEOUT = 5 * 60 * 1000L // 5 minutes
Original file line number Diff line number Diff line change
@@ -0,0 +1,129 @@
/*
* Lincheck
*
* Copyright (C) 2019 - 2023 JetBrains s.r.o.
*
* This Source Code Form is subject to the terms of the
* Mozilla Public License, v. 2.0. If a copy of the MPL was not distributed
* with this file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/

@file:Suppress("INVISIBLE_REFERENCE", "INVISIBLE_MEMBER")

package org.jetbrains.kotlinx.lincheck_benchmark

import org.jetbrains.kotlinx.lincheck.*
import kotlinx.serialization.Serializable
import kotlinx.serialization.json.Json
import kotlinx.serialization.json.encodeToStream
import kotlin.time.Duration.Companion.nanoseconds
import kotlin.time.DurationUnit
import java.io.File


// Identifier of a single benchmark run: benchmark name combined with the Lincheck
// strategy name (see `BenchmarkStatistics.id` below).
typealias BenchmarkID = String

/**
 * Serializable container for the whole benchmark suite report:
 * a mapping from benchmark ID to the statistics of that benchmark run.
 */
@Serializable
data class BenchmarksReport(
    val data: Map<String, BenchmarkStatistics>
)

/**
 * Serializable statistics of a single benchmark run.
 *
 * @property name benchmark name (class name with the "Benchmark" suffix removed).
 * @property strategy the Lincheck strategy used in the run.
 * @property runningTimeNano total running time, in nanoseconds.
 * @property iterationsCount number of performed iterations.
 * @property invocationsCount total number of performed invocations.
 * @property scenariosStatistics per-scenario-shape aggregated statistics.
 * @property invocationsRunningTimeNano running time of each invocation, in nanoseconds;
 *   may be empty if per-invocation statistics gathering is disabled.
 */
@Serializable
data class BenchmarkStatistics(
    val name: String,
    val strategy: LincheckStrategy,
    val runningTimeNano: Long,
    val iterationsCount: Int,
    val invocationsCount: Int,
    val scenariosStatistics: List<ScenarioStatistics>,
    val invocationsRunningTimeNano: LongArray,
) {
    // A LongArray property breaks the generated data-class equals/hashCode
    // (arrays compare by reference); override both to compare array contents.
    override fun equals(other: Any?): Boolean {
        if (this === other) return true
        if (other !is BenchmarkStatistics) return false
        return name == other.name &&
            strategy == other.strategy &&
            runningTimeNano == other.runningTimeNano &&
            iterationsCount == other.iterationsCount &&
            invocationsCount == other.invocationsCount &&
            scenariosStatistics == other.scenariosStatistics &&
            invocationsRunningTimeNano.contentEquals(other.invocationsRunningTimeNano)
    }

    override fun hashCode(): Int {
        var result = name.hashCode()
        result = 31 * result + strategy.hashCode()
        result = 31 * result + runningTimeNano.hashCode()
        result = 31 * result + iterationsCount
        result = 31 * result + invocationsCount
        result = 31 * result + scenariosStatistics.hashCode()
        result = 31 * result + invocationsRunningTimeNano.contentHashCode()
        return result
    }
}

/**
 * Aggregated statistics for all iterations sharing the same scenario shape
 * (number of threads and number of operations per thread).
 *
 * @property threads number of parallel threads in the scenario.
 * @property operations number of operations per thread in the parallel part.
 * @property invocationsCount total number of invocations over all iterations of this shape.
 * @property runningTimeNano total running time, in nanoseconds.
 * @property invocationAverageTimeNano average invocation time, in nanoseconds.
 * @property invocationStandardErrorTimeNano standard error of the invocation time,
 *   in nanoseconds; -1 when per-invocation statistics were not gathered.
 */
@Serializable
data class ScenarioStatistics(
    val threads: Int,
    val operations: Int,
    val invocationsCount: Int,
    val runningTimeNano: Long,
    val invocationAverageTimeNano: Long,
    val invocationStandardErrorTimeNano: Long,
)

/** All benchmark identifiers present in the report. */
val BenchmarksReport.benchmarkIDs: List<BenchmarkID>
    get() = data.keys.toList()

/** Distinct benchmark names (one name may appear under several strategies). */
val BenchmarksReport.benchmarkNames: List<String>
    get() = data.values.map { it.name }.distinct()

/** Identifier of the benchmark run: its name combined with the strategy. */
val BenchmarkStatistics.id: BenchmarkID
    get() = "$name-$strategy"

/**
 * Converts raw [LincheckStatistics] gathered during a run into a serializable
 * [BenchmarkStatistics] record.
 *
 * Per-iteration data is aggregated in two ways: all per-invocation running times are
 * flattened into a single array, and iterations are grouped by scenario shape
 * (thread count, operations per thread) to produce [ScenarioStatistics] entries.
 *
 * @param name benchmark name to record.
 * @param strategy the Lincheck strategy under which the statistics were gathered.
 */
fun LincheckStatistics.toBenchmarkStatistics(name: String, strategy: LincheckStrategy) = BenchmarkStatistics(
    name = name,
    strategy = strategy,
    runningTimeNano = runningTimeNano,
    iterationsCount = iterationsCount,
    invocationsCount = invocationsCount,
    // flatten per-invocation running times of all iterations into a single array
    // (`flatten` here is presumably a project helper for List<LongArray> — TODO confirm)
    invocationsRunningTimeNano = iterationsStatistics
        .values.map { it.invocationsRunningTimeNano }
        .flatten(),
    // group iterations by scenario shape: (thread count, operations per thread);
    // operations count is taken from the first thread of the parallel part
    scenariosStatistics = iterationsStatistics
        .values.groupBy { (it.scenario.nThreads to it.scenario.parallelExecution[0].size) }
        .map { (key, statistics) ->
            val (threads, operations) = key
            val invocationsRunningTime = statistics
                .map { it.invocationsRunningTimeNano }
                .flatten()
            val invocationsCount = statistics.sumOf { it.invocationsCount }
            val runningTimeNano = statistics.sumOf { it.runningTimeNano }
            val invocationAverageTimeNano = when {
                // handle the case when per-invocation statistics is not gathered:
                // fall back to the coarse total-time / invocations estimate
                invocationsRunningTime.isEmpty() -> (runningTimeNano.toDouble() / invocationsCount).toLong()
                else -> invocationsRunningTime.average().toLong()
            }
            val invocationStandardErrorTimeNano = when {
                // if per-invocation statistics is not gathered we cannot compute standard error;
                // -1 acts as the "not available" sentinel
                invocationsRunningTime.isEmpty() -> -1L
                // `standardError` is presumably a project-defined extension — TODO confirm
                else -> invocationsRunningTime.standardError().toLong()
            }
            ScenarioStatistics(
                threads = threads,
                operations = operations,
                invocationsCount = invocationsCount,
                runningTimeNano = runningTimeNano,
                invocationAverageTimeNano = invocationAverageTimeNano,
                invocationStandardErrorTimeNano = invocationStandardErrorTimeNano,
            )
        }
)

/** Serializes the report as JSON into the file `<filename>.json`. */
fun BenchmarksReport.saveJson(filename: String) {
    File("$filename.json").outputStream().use { stream ->
        Json.encodeToStream(this, stream)
    }
}

/**
 * Saves the report in a simple text format for testing integration with
 * ij-perf dashboards; the file is written as `<filename>.txt` in US-ASCII.
 */
fun BenchmarksReport.saveTxt(filename: String) {
    val report = buildString {
        appendReportHeader()
        // for ij-perf reports, we currently track only benchmarks overall running time
        data.values.forEach { appendBenchmarkRunningTime(it) }
    }
    File("$filename.txt").writeText(report, charset = Charsets.US_ASCII)
}

// Writes the fixed header line that opens every text report.
private fun StringBuilder.appendReportHeader() {
    append("Lincheck benchmarks suite").append('\n')
}

// Writes one "<strategy>.<name>.runtime.ms <value>" line with the benchmark's
// overall running time converted from nanoseconds to milliseconds.
private fun StringBuilder.appendBenchmarkRunningTime(benchmarkStatistics: BenchmarkStatistics) {
    val runningTimeMs = benchmarkStatistics.runningTimeNano
        .nanoseconds
        .toLong(DurationUnit.MILLISECONDS)
    appendLine("${benchmarkStatistics.strategy}.${benchmarkStatistics.name}.runtime.ms $runningTimeMs")
}
Loading

0 comments on commit 2b8d4fa

Please sign in to comment.