Commit e00fe5c9 authored by Noric Couderc

Implemented normalization of features

We normalize the features by reusing the existing FeatureRatio; it is just
printed differently.
parent b3e0c6d0
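
Before the diff, a minimal, self-contained sketch of the idea: a normalized feature is a FeatureRatio that keeps the numerator's feature type and prints differently. The Feature stub and the plain-ratio toString() are assumptions for illustration; FeatureRatio, NormalizedFeature, and featureType() mirror the diff below.

// Sketch only; types stubbed, not the project's actual sources.
abstract class Feature {
    enum class Type { SOFTWARE, HARDWARE, RATIO }
    abstract fun featureType(): Type
}

// Made 'open' by this commit so NormalizedFeature can extend it.
open class FeatureRatio(val f1: Feature, val f2: Feature) : Feature() {
    override fun featureType(): Type = Type.RATIO
    override fun toString(): String = "$f1 / $f2" // assumed printing for plain ratios
}

// Divides like a FeatureRatio, but reports the numerator's feature type
// and announces the normalization in its printed name.
class NormalizedFeature(f: Feature, n: Feature) : FeatureRatio(f, n) {
    override fun featureType(): Type = f1.featureType()
    override fun toString(): String = "$f1 (normalized by $f2)"
}
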
@@ -12,6 +12,7 @@ import se.lth.cs.papicounters.MockupPapiTracerRunner
import se.lth.cs.papicounters.PAPICounter
import se.lth.cs.papicounters.PapiTracerRunner
import se.lth.cs.util.brainyFeatures
+import se.lth.cs.util.normalize
import java.io.File
import java.io.FileReader
import java.io.FileWriter
@@ -87,7 +88,7 @@ class PapiCommandLine : CliktCommand() {
PapiTracerRunner()
}
-val features = brainyFeatures()
+val features = if (normalizeFeatures) { normalize(brainyFeatures()) } else { brainyFeatures() }
SyntheticBenchmarkExperimentPrinter(writer,
methodOutputFormat,
numberRuns, features,
......
@@ -46,6 +46,7 @@ class Experiment(val numberIterations : Int,
// specific iteration!
features.accept(this)
val values = currentIterationResults.toMap()
results.add(Result(iterationNumber, benchmark, values))
iterationNumber++
}
......
@@ -96,7 +96,7 @@ class CostFeature(val opType : OperationType) : Feature() {
}
}
-class FeatureRatio(val f1 : Feature, val f2 : Feature) : Feature() {
+open class FeatureRatio(val f1 : Feature, val f2 : Feature) : Feature() {
override fun featureType(): Type {
return Type.RATIO
}
@@ -151,25 +151,32 @@ class TotalMethodInvocations() : Feature() {
}
/**
- * This is how we normalize some features,
- * by taking the ratio with some others:
- * example: Normalize PAPI_L1_DCM by PAPI_TOT_CYC
- * is just dividing
+ * Normalized features are features, except that you divide them
+ * by a normalization feature.
+ *
+ * They are not printed the same way as feature ratios, though.
*/
-fun normalize(f : Feature, n : Feature): Feature {
-    return FeatureRatio(f, n)
+class NormalizedFeature(f : Feature, n : Feature) : FeatureRatio(f, n) {
+    override fun featureType(): Type {
+        return f1.featureType()
+    }
+    override fun toString(): String {
+        return "$f1 (normalized by $f2)"
+    }
}
-fun defaultNormalize(f : Feature) : Feature? {
+fun defaultNormalize(f : Feature) : Feature {
if (f.featureType() == Feature.Type.SOFTWARE) {
-return normalize(f, TotalMethodInvocations())
+return NormalizedFeature(f, TotalMethodInvocations())
}
if (f.featureType() == Feature.Type.HARDWARE) {
-return normalize(f, PAPICounter("PAPI_TOT_CYC"))
+return NormalizedFeature(f, PAPICounter("PAPI_TOT_CYC"))
}
-return null
+// When we don't know, we just return the feature as-is.
+return f
}
/**
@@ -190,6 +197,14 @@ class FeatureSet(vararg features : Feature) : ArrayList<Feature>() {
}
}
+/**
+ * We normalize a FeatureSet by applying defaultNormalize to each of its features.
+ */
+fun normalize(featureSet : FeatureSet): FeatureSet {
+    val features = featureSet.map { defaultNormalize(it) }.toTypedArray()
+    return FeatureSet(*features)
+}
fun brainyFeatures() : FeatureSet {
return FeatureSet(
PAPICounter("PAPI_BR_MSP"),
......
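
To make the dispatch above concrete, a hedged single-feature example; the counter names come from the old comment, and PAPICounter's exact toString() is an assumption not shown in this diff.

// Hypothetical walk-through, assuming the diff above: a hardware counter
// is divided by total cycles, so PAPI_L1_DCM becomes a per-cycle miss rate.
val misses = PAPICounter("PAPI_L1_DCM")   // featureType() == HARDWARE (assumed)
val perCycle = defaultNormalize(misses)   // NormalizedFeature(misses, PAPICounter("PAPI_TOT_CYC"))
println(perCycle)                         // e.g. "PAPI_L1_DCM (normalized by PAPI_TOT_CYC)"
// Software features are divided by TotalMethodInvocations instead, and
// features of unknown type pass through unchanged.
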
@@ -91,4 +91,25 @@ class ExperimentTest {
Assertions.assertTrue(iteration.values.values.all { it > 0 })
}
}
+@Test
+fun testNormalizationBrainyFeatures() {
+    val bench = BCBenchmarkPackage.LIST(1234, 100, 0, ArrayList<Int>())
+    val features = normalize(brainyFeatures())
+    val exp = Experiment(10, bench, features, runner)
+    val results = exp.run()
+    for (iteration in results) {
+        Assertions.assertEquals(bench, iteration.benchmark)
+        Assertions.assertTrue(iteration.values.values.all { it > 0 })
+        val normalizedFeatures = iteration.values.keys.filterIsInstance<NormalizedFeature>()
+        for (f in normalizedFeatures) {
+            val value = iteration.values[f]!!
+            Assertions.assertTrue(value <= 1.0)
+        }
+    }
+}
}
\ No newline at end of file
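
Finally, a hedged usage sketch mirroring the new test; the no-argument MockupPapiTracerRunner() construction is an assumption (the test class's runner field is not shown in this diff).

// Hypothetical usage, assuming the API in this diff.
val features = normalize(brainyFeatures())
// Each feature now prints with its normalizer, e.g.
// "PAPI_BR_MSP (normalized by PAPI_TOT_CYC)"; exact counter names depend
// on PAPICounter.toString(), which is not shown here.
features.forEach { println(it) }

val bench = BCBenchmarkPackage.LIST(1234, 100, 0, ArrayList<Int>())
val results = Experiment(10, bench, features, MockupPapiTracerRunner()).run()
// As the new test asserts, every NormalizedFeature reading stays <= 1.0,
// since each counter is divided by its normalizer's total.
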