HprofIOPerfTest.kt
TLDR
The provided file, HprofIOPerfTest.kt, is a Kotlin test file containing performance tests for reading the Hprof file format in the Shark library. The tests measure the IO cost of operations such as reading the byte size of object arrays, primitive arrays, and instances, reading records (including LRU cache hits and evictions), and the random access reads performed during a full leak analysis. The file also includes utility methods for tracking IO read metrics and computing the reads expected for a full file scan.
Methods
trackAnalyzeRandomAccessMetrics(hprofFile: File): Pair<Reads, Reads>
Tracks the IO read metrics for the random access (analysis) phase of an Hprof analysis, running the analysis once without and once with retained heap size computation. It returns a pair of Reads objects, each exposing the read count, the median bytes read, and the total bytes read.
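A minimal usage sketch, mirroring the freeze tests in the code below (the fixture name is just one of the hprof files on the test classpath):

@Test fun `sketch - random access metrics`() {
  val hprofFile = "leak_asynctask_o.hprof".classpathFile()
  // First element: analysis without retained heap size computation, second: with it.
  val (withoutRetained, withRetained) = trackAnalyzeRandomAccessMetrics(hprofFile)
  // In the frozen metrics below, the run that computes retained heap sizes
  // reads at least as many bytes in total.
  assertThat(withRetained.totalBytesRead).isGreaterThanOrEqualTo(withoutRetained.totalBytesRead)
}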
trackAnalyzeIoReadMetrics(hprofFile: File, computeRetainedHeapSize: Boolean = false, printResult: Boolean = false): List<List<Int>>
Runs a full leak analysis over the given Hprof file through a MetricsDualSourceProvider and returns that provider's sourcesMetrics: one inner list per source opened during the analysis (header parsing, fast scan, indexing, random access), each holding the byte count of every read performed on that source. The optional parameters enable retained heap size computation and printing of the analysis result.
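A condensed sketch of how the returned metrics map to phases, assuming the same fixture and constants used by the tests below:

val metrics = trackAnalyzeIoReadMetrics("leak_asynctask_o.hprof".classpathFile())
// One inner list per source, in order: header parsing, fast scan, indexing, random access.
val (headerReads, fastScanReads, indexingReads, randomAccessReads) = metrics
// Each inner list holds the byte count of every read performed on that source,
// e.g. header parsing needs only a single Okio segment read.
assertThat(headerReads).isEqualTo(listOf(OKIO_SEGMENT_SIZE))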
fullScanExpectedReads(fileLength: Long): List<Int>
Computes the reads expected for a sequential full scan of a file of the given length: a series of full Okio segment reads, followed by the remaining bytes (if any) and a final zero-length read at end of file.
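For example, with the 8192-byte Okio segment size used below, a hypothetical 20000-byte file scans as two full segments, a 3616-byte remainder, and a final zero-length read:

assertThat(fullScanExpectedReads(20_000L))
  .isEqualTo(listOf(8192, 8192, 3616, 0))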
Classes
HprofIOPerfTest
This class contains the performance tests themselves: assertions that specific heap graph operations read zero bytes, checks that header parsing, fast scan, and indexing read exactly the expected byte counts, and freeze tests that pin the random access metrics of three hprof fixtures. It also hosts the utility methods described above.
Reads
This nested class summarizes the IO read metrics of a single source: the read count, the median bytes read, and the total bytes read.
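A tiny worked example of the values it exposes (median() comes from kotlin-statistics and returns a Double):

val reads = Reads(listOf(10, 20, 30))
// reads.readsCount == 3, reads.medianBytesRead == 20.0, reads.totalBytesRead == 60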
MetricsDualSourceProvider (external class)
This class is not defined in this file; it lives elsewhere in the shark package (shark.MetricsDualSourceProvider). It wraps an Hprof file as a source provider for opening the heap graph and records, for each source it opens, the byte count of every read, exposed through its sourcesMetrics property.
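The tests below all measure IO through it with the same pattern; a condensed sketch, taken from the HeapInstance#byteSize test further down:

val source = MetricsDualSourceProvider("leak_asynctask_o.hprof".classpathFile())
val bytesRead = source.openHeapGraph().use { graph ->
  // sourcesMetrics holds one mutable list of read sizes per source opened so far;
  // clearing the latest one isolates the reads of the operation under test.
  val bytesReadMetrics = source.sourcesMetrics.last().apply { clear() }
  graph.instances.first().byteSize
  bytesReadMetrics.sum()
}
assertThat(bytesRead).isEqualTo(0)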
END
package shark
import java.io.File
import kotlin.math.floor
import org.assertj.core.api.Assertions.assertThat
import org.junit.Test
import org.nield.kotlinstatistics.median
import shark.HprofHeapGraph.Companion.openHeapGraph
/**
 * IO reads are the largest factor in Shark's performance, so these tests help prevent
 * regressions.
*/
class HprofIOPerfTest {
@Test fun `HeapObjectArray#readByteSize() does not read`() {
val hprofFile = "leak_asynctask_o.hprof".classpathFile()
val arrayId = hprofFile.openHeapGraph().use { graph ->
graph.objectArrays.maxBy { it.readRecord().elementIds.size * graph.identifierByteSize }!!.objectId
}
val source = MetricsDualSourceProvider(hprofFile)
val bytesRead = source.openHeapGraph().use { graph ->
val bytesReadMetrics = source.sourcesMetrics.last().apply { clear() }
graph.findObjectById(arrayId).asObjectArray!!.byteSize
bytesReadMetrics.sum()
}
assertThat(bytesRead).isEqualTo(0)
}
@Test fun `HeapObjectArray#byteSize correctly reads size of array`() {
val hprofFile = "leak_asynctask_o.hprof".classpathFile()
hprofFile.openHeapGraph().use { graph ->
graph.objectArrays.forEach { array ->
assertThat(array.byteSize).isEqualTo(
array.readRecord().elementIds.size * graph.identifierByteSize
)
}
}
}
@Test fun `HeapPrimitiveArray#byteSize does not read`() {
val hprofFile = "leak_asynctask_o.hprof".classpathFile()
val arrayId = hprofFile.openHeapGraph().use { graph ->
graph.primitiveArrays.maxBy { it.readRecord().size * it.primitiveType.byteSize }!!.objectId
}
val source = MetricsDualSourceProvider(hprofFile)
val bytesRead = source.openHeapGraph().use { graph ->
val bytesReadMetrics = source.sourcesMetrics.last().apply { clear() }
graph.findObjectById(arrayId).asPrimitiveArray!!.byteSize
bytesReadMetrics.sum()
}
assertThat(bytesRead).isEqualTo(0)
}
@Test fun `HeapPrimitiveArray#readByteSize() correctly reads size of array`() {
val hprofFile = "leak_asynctask_o.hprof".classpathFile()
hprofFile.openHeapGraph().use { graph ->
graph.primitiveArrays.forEach { array ->
assertThat(array.byteSize).isEqualTo(
array.readRecord().size * array.primitiveType.byteSize
)
}
}
}
@Test fun `HeapInstance#byteSize reads 0 bytes`() {
val hprofFile = "leak_asynctask_o.hprof".classpathFile()
val source = MetricsDualSourceProvider(hprofFile)
val bytesRead = source.openHeapGraph().use { graph ->
val bytesReadMetrics = source.sourcesMetrics.last().apply { clear() }
graph.instances.first().byteSize
bytesReadMetrics.sum()
}
assertThat(bytesRead).isEqualTo(0)
}
@Test fun `consecutive call to HeapObject#readRecord() reads 0 bytes`() {
val hprofFile = "leak_asynctask_o.hprof".classpathFile()
val source = MetricsDualSourceProvider(hprofFile)
val bytesRead = source.openHeapGraph().use { graph ->
graph.objects.first().readRecord()
val bytesReadMetrics = source.sourcesMetrics.last().apply { clear() }
graph.objects.first().readRecord()
bytesReadMetrics.sum()
}
assertThat(bytesRead).isEqualTo(0)
}
@Test fun `HeapObject#readRecord() reads 0 bytes when reading from LRU`() {
val hprofFile = "leak_asynctask_o.hprof".classpathFile()
val source = MetricsDualSourceProvider(hprofFile)
val bytesRead = source.openHeapGraph().use { graph ->
graph.objects.take(HPROF_HEAP_GRAPH_LRU_OBJECT_CACHE_SIZE).forEach { it.readRecord() }
val bytesReadMetrics = source.sourcesMetrics.last().apply { clear() }
graph.objects.take(HPROF_HEAP_GRAPH_LRU_OBJECT_CACHE_SIZE).forEach { it.readRecord() }
bytesReadMetrics.sum()
}
assertThat(bytesRead).isEqualTo(0)
}
@Test fun `HeapObject#readRecord() reads bytes when reading evicted object`() {
val hprofFile = "leak_asynctask_o.hprof".classpathFile()
val source = MetricsDualSourceProvider(hprofFile)
val bytesRead = source.openHeapGraph().use { graph ->
graph.objects.take(HPROF_HEAP_GRAPH_LRU_OBJECT_CACHE_SIZE + 1).forEach { it.readRecord() }
val bytesReadMetrics = source.sourcesMetrics.last().apply { clear() }
graph.objects.first().readRecord()
bytesReadMetrics.sum()
}
assertThat(bytesRead).isGreaterThan(0)
}
@Test fun `analyze() creates 4 separate sources`() {
val hprofFile = "leak_asynctask_o.hprof".classpathFile()
val metrics = trackAnalyzeIoReadMetrics(hprofFile)
// 4 phases: Read headers, fast scan, indexing, then random access for analysis.
assertThat(metrics).hasSize(4)
}
@Test fun `header parsing requires only one segment`() {
val hprofFile = "leak_asynctask_o.hprof".classpathFile()
val metrics = trackAnalyzeIoReadMetrics(hprofFile)
val headerParsingReads = metrics[0]
assertThat(headerParsingReads).isEqualTo(listOf(OKIO_SEGMENT_SIZE))
}
@Test fun `fast scan pre indexing is a full file scan`() {
val hprofFile = "leak_asynctask_o.hprof".classpathFile()
val metrics = trackAnalyzeIoReadMetrics(hprofFile)
val fastScanReads = metrics[1]
val expectedReads = fullScanExpectedReads(hprofFile.length())
assertThat(fastScanReads).hasSameSizeAs(expectedReads).isEqualTo(expectedReads)
}
@Test fun `indexing is a full file scan`() {
val hprofFile = "leak_asynctask_o.hprof".classpathFile()
val metrics = trackAnalyzeIoReadMetrics(hprofFile)
val indexingReads = metrics[2]
val expectedReads = fullScanExpectedReads(hprofFile.length())
assertThat(indexingReads).hasSameSizeAs(expectedReads).isEqualTo(expectedReads)
}
@Test fun `freeze leak_asynctask_o hprof random access metrics`() {
val hprofFile = "leak_asynctask_o.hprof".classpathFile()
val metrics = trackAnalyzeRandomAccessMetrics(hprofFile)
assertThat(
listOf(
metrics.first.readsCount, metrics.first.medianBytesRead, metrics.first.totalBytesRead,
metrics.second.readsCount, metrics.second.medianBytesRead, metrics.second.totalBytesRead
)
)
.isEqualTo(
listOf(
24384, 40.0, 1244990, 25653, 40.0, 1302298
)
)
}
@Test fun `freeze leak_asynctask_m hprof random access metrics`() {
val hprofFile = "leak_asynctask_m.hprof".classpathFile()
val metrics = trackAnalyzeRandomAccessMetrics(hprofFile)
assertThat(
listOf(
metrics.first.readsCount, metrics.first.medianBytesRead, metrics.first.totalBytesRead,
metrics.second.readsCount, metrics.second.medianBytesRead, metrics.second.totalBytesRead
)
)
.isEqualTo(
listOf(
22472, 40.0, 2202271, 22477, 40.0, 2202451
)
)
}
@Test fun `freeze leak_asynctask_pre_m hprof random access metrics`() {
val hprofFile = "leak_asynctask_pre_m.hprof".classpathFile()
val metrics = trackAnalyzeRandomAccessMetrics(hprofFile)
assertThat(
listOf(
metrics.first.readsCount, metrics.first.medianBytesRead, metrics.first.totalBytesRead,
metrics.second.readsCount, metrics.second.medianBytesRead, metrics.second.totalBytesRead
)
)
.isEqualTo(
listOf(
16829, 32.0, 765450, 16831, 32.0, 765514
)
)
}
class Reads(reads: List<Int>) {
val readsCount = reads.size
val medianBytesRead = reads.median()
val totalBytesRead = reads.sum()
}
private fun trackAnalyzeRandomAccessMetrics(hprofFile: File): Pair<Reads, Reads> {
return trackAnalyzeIoReadMetrics(hprofFile).run {
Reads(this[3])
} to trackAnalyzeIoReadMetrics(
hprofFile,
computeRetainedHeapSize = true,
printResult = true
).run {
Reads(this[3])
}
}
private fun trackAnalyzeIoReadMetrics(
hprofFile: File,
computeRetainedHeapSize: Boolean = false,
printResult: Boolean = false
): List<List<Int>> {
val source = MetricsDualSourceProvider(hprofFile)
val analysis = source.openHeapGraph().use { graph ->
val leakingObjectFinder = FilteringLeakingObjectFinder(
AndroidObjectInspectors.appLeakingObjectFilters
)
val objectIds = leakingObjectFinder.findLeakingObjectIds(graph)
val referenceMatchers = AndroidReferenceMatchers.appDefaults
val tracer = RealLeakTracerFactory(
shortestPathFinderFactory = PrioritizingShortestPathFinder.Factory(
listener = {},
referenceReaderFactory = AndroidReferenceReaderFactory(referenceMatchers),
gcRootProvider = MatchingGcRootProvider(referenceMatchers),
computeRetainedHeapSize = computeRetainedHeapSize,
),
objectInspectors = AndroidObjectInspectors.appDefaults,
listener = {}
).createFor(graph)
tracer.traceObjects(objectIds)
}
if (printResult) {
println(analysis)
}
return source.sourcesMetrics
}
private fun fullScanExpectedReads(fileLength: Long): List<Int> {
val fullReadsCount = floor(fileLength / OKIO_SEGMENT_SIZE.toDouble()).toInt()
val remainderBytes = (fileLength - (OKIO_SEGMENT_SIZE * fullReadsCount)).toInt()
val finalReads = if (remainderBytes > 0) listOf(remainderBytes, 0) else listOf(0)
return List(fullReadsCount) {
OKIO_SEGMENT_SIZE
} + finalReads
}
companion object {
private const val OKIO_SEGMENT_SIZE = 8192
private const val HPROF_HEAP_GRAPH_LRU_OBJECT_CACHE_SIZE = 3000
}
}