Commit e9378161 authored by Guillaume Chérel

Saltelli sampling beginning.

parent 40b75e55
......@@ -460,7 +460,7 @@ lazy val evolution = OsgiProject(pluginDir, "org.openmole.plugin.method.evolutio
lazy val directSampling = OsgiProject(pluginDir, "org.openmole.plugin.method.directsampling", imports = Seq("*")) dependsOn(openmoleDSL, distributionDomain, pattern, modifierDomain) settings (pluginSettings: _*)
lazy val sensitivity = OsgiProject(pluginDir, "org.openmole.plugin.method.sensitivity", imports = Seq("*")) dependsOn(exception, workflow, workspace, openmoleDSL, lhsSampling) settings (pluginSettings: _*)
lazy val sensitivity = OsgiProject(pluginDir, "org.openmole.plugin.method.sensitivity", imports = Seq("*")) dependsOn(exception, workflow, workspace, openmoleDSL, lhsSampling, directSampling) settings (pluginSettings: _*)
/* Sampling */
......
......@@ -25,6 +25,8 @@ import org.openmole.core.workflow.sampling._
import org.openmole.core.workspace._
import org.openmole.tool.random._
import cats.implicits._
import org.openmole.core.exception.UserBadDataError
import scala.annotation.tailrec
import org.openmole.core.tools.math._
......@@ -48,15 +50,15 @@ object Scalable {
override def prototype(t: ScalableNumber): Val[_] = t.prototype
override def size(t: ScalableNumber): FromContext[Int] = 1
override def scaled(s: ScalableNumber)(genomePart: Seq[Double]): FromContext[Scaled] = {
val g = genomePart.head
assert(!g.isNaN)
override def unflatten(s: ScalableNumber)(sequence: Seq[Double], scale: Boolean): FromContext[Scaled] = {
val g = sequence.head
(s.min map2 s.max) { (min, max) ⇒
val sc = g.scale(min, max)
val sc = if (scale) g.scale(min, max) else g
ScaledScalar(s.prototype, sc)
}
}
override def toVariable(t: ScalableNumber)(value: Seq[Any]): Variable[_] =
Variable.unsecure(prototype(t).toArray, value.map(_.asInstanceOf[Double]).toArray[Double])
}
......@@ -69,8 +71,8 @@ object Scalable {
override def inputs(t: Factor[D, Double]): PrototypeSet = Seq()
override def prototype(t: Factor[D, Double]): Val[_] = scalarIsScalable.prototype(factorToScalar(t))
override def size(t: Factor[D, Double]) = scalarIsScalable.size(factorToScalar(t))
override def scaled(t: Factor[D, Double])(genomePart: Seq[Double]): FromContext[Scaled] =
scalarIsScalable.scaled(factorToScalar(t))(genomePart)
override def unflatten(t: Factor[D, Double])(genomePart: Seq[Double], scale: Boolean): FromContext[Scaled] =
scalarIsScalable.unflatten(factorToScalar(t))(genomePart, scale)
override def toVariable(t: Factor[D, Double])(value: Seq[Any]): Variable[_] =
scalarIsScalable.toVariable(factorToScalar(t))(value)
}
......@@ -84,12 +86,12 @@ object Scalable {
override def size(t: Factor[D, Array[Double]]): FromContext[Int] =
(bounded.min(t.domain) map2 bounded.max(t.domain)) { case (min, max) ⇒ math.min(min.size, max.size) }
override def scaled(t: Factor[D, Array[Double]])(values: Seq[Double]): FromContext[Scaled] = {
override def unflatten(t: Factor[D, Array[Double]])(sequence: Seq[Double], scale: Boolean): FromContext[Scaled] = {
def scaled =
  (bounded.min(t.domain) map2 bounded.max(t.domain)) {
    case (min, max) ⇒
      (values zip (min zip max)).map { case (g, (min, max)) ⇒ g.scale(min, max) }
      (sequence zip (min zip max)).map { case (g, (min, max)) ⇒ if (scale) g.scale(min, max) else g }
  }
scaled.map { sc ⇒ ScaledSequence(t.prototype, sc.toArray) }
......@@ -107,29 +109,36 @@ trait Scalable[T] {
def inputs(t: T): PrototypeSet
def prototype(t: T): Val[_]
def size(t: T): FromContext[Int]
def scaled(t: T)(values: Seq[Double]): FromContext[Scalable.Scaled]
def unflatten(t: T)(sequence: Seq[Double], scale: Boolean): FromContext[Scalable.Scaled]
def toVariable(t: T)(value: Seq[Any]): Variable[_]
}
object ScalarOrSequenceOfDouble {
def scaled(scales: Seq[ScalarOrSequenceOfDouble[_]], values: Seq[Double]): FromContext[List[Variable[_]]] = {
@tailrec def scaled0(scales: List[ScalarOrSequenceOfDouble[_]], values: List[Double], acc: List[Variable[_]] = Nil)(context: Context, rng: RandomProvider, newFile: NewFile, fileService: FileService): List[Variable[_]] =
def unflatten(scales: Seq[ScalarOrSequenceOfDouble[_]], values: Seq[Double], scale: Boolean = true): FromContext[List[Variable[_]]] = {
@tailrec def unflatten0(scales: List[ScalarOrSequenceOfDouble[_]], values: List[Double], acc: List[Variable[_]] = Nil)(context: Context, rng: RandomProvider, newFile: NewFile, fileService: FileService): List[Variable[_]] =
if (scales.isEmpty || values.isEmpty) acc.reverse
else {
val input = scales.head
val (variable, tail) =
input.scaled(values).map {
input.unflatten(values, scale).map {
case Scalable.ScaledScalar(p, v) ⇒ Variable(p, v) → values.tail
case Scalable.ScaledSequence(p, v) ⇒ Variable(p, v) → values.drop(input.size(context)(rng, newFile, fileService))
}.from(context)(rng, newFile, fileService)
scaled0(scales.tail, tail, variable :: acc)({ context + variable }, rng, newFile, fileService)
unflatten0(scales.tail, tail, variable :: acc)({ context + variable }, rng, newFile, fileService)
}
FromContext { p ⇒ scaled0(scales.toList, values.toList)(p.context, p.random, p.newFile, p.fileService) }
FromContext { p ⇒ unflatten0(scales.toList, values.toList)(p.context, p.random, p.newFile, p.fileService) }
}
def flatten(values: Seq[Any]) =
values map {
case x: Double ⇒ Array(x)
case x: Array[Double] ⇒ x
case x ⇒ throw new InternalError(s"Value $x of type ${x.getClass} should be of type Double or Array[Double]")
}
implicit def fromScalable[T: Scalable](t: T): ScalarOrSequenceOfDouble[T] = new ScalarOrSequenceOfDouble(t, implicitly[Scalable[T]])
}
......@@ -138,6 +147,6 @@ class ScalarOrSequenceOfDouble[T](t: T, scalable: Scalable[T]) {
def inputs = scalable.inputs(t)
def prototype = scalable.prototype(t)
def size = scalable.size(t)
def scaled(values: Seq[Double]) = scalable.scaled(t)(values)
def unflatten(values: Seq[Double], scale: Boolean) = scalable.unflatten(t)(values, scale)
def toVariable(value: Seq[Any]): Variable[_] = scalable.toVariable(t)(value)
}
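The refactoring above generalizes `scaled` into `unflatten`: a flat vector of doubles is consumed left to right, each input taking one value (scalar) or a slice (sequence), with rescaling from [0, 1] to the factor bounds now optional via the `scale` flag. A minimal standalone sketch of that consumption logic, not the OpenMOLE API itself, with all names illustrative:

```scala
object UnflattenSketch extends App {
  sealed trait Input { def size: Int }
  final case class Scalar(name: String, min: Double, max: Double) extends Input { def size = 1 }
  final case class Sequence(name: String, min: Vector[Double], max: Vector[Double]) extends Input {
    def size = math.min(min.size, max.size)
  }

  // Consume `values` left to right; each input takes `size` elements and
  // optionally rescales them from [0, 1] to its own [min, max] bounds.
  def unflatten(inputs: List[Input], values: List[Double], scale: Boolean = true): List[(String, Vector[Double])] =
    inputs match {
      case Nil ⇒ Nil
      case (s: Scalar) :: tail ⇒
        val v = if (scale) s.min + values.head * (s.max - s.min) else values.head
        (s.name, Vector(v)) :: unflatten(tail, values.tail, scale)
      case (q: Sequence) :: tail ⇒
        val (slice, rest) = values.splitAt(q.size)
        val scaledSlice = (slice, q.min, q.max).zipped.map {
          (g, mn, mx) ⇒ if (scale) mn + g * (mx - mn) else g
        }
        (q.name, scaledSlice.toVector) :: unflatten(tail, rest, scale)
    }

  // Three uniform draws feed one scalar factor and one 2-element sequence factor.
  println(unflatten(List(Scalar("x", 0, 10), Sequence("y", Vector(0, 0), Vector(1, 2))), List(0.5, 0.25, 0.5)))
  // List((x,Vector(5.0)), (y,Vector(0.25, 1.0)))
}
```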
......@@ -184,7 +184,7 @@ sealed class MorrisSampling(
val variablesForRefRun: List[Variable[_]] = List(
Variable(MorrisSampling.varFactorName, ""),
Variable(MorrisSampling.varDelta, 0.0)
) ++ ScalarOrSequenceOfDouble.scaled(factors, t.seed).from(context)
) ++ ScalarOrSequenceOfDouble.unflatten(factors, t.seed).from(context)
// forge lists of lists of variables for the runs of the trajectory
val variablesForElementaryEffects = (t.points, t.deltas, t.variableOrder.zipWithIndex).zipped.map(
......@@ -194,7 +194,7 @@ sealed class MorrisSampling(
List(
Variable(MorrisSampling.varFactorName, factors(factoridx).prototype.name),
Variable(MorrisSampling.varDelta, point(factoridx) - t.seed(factoridx))
) ++ ScalarOrSequenceOfDouble.scaled(factors, point).from(context)
) ++ ScalarOrSequenceOfDouble.unflatten(factors, point).from(context)
})
variablesForRefRun :: variablesForElementaryEffects
......
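For reference, the `varDelta` recorded above, `point(factoridx) - t.seed(factoridx)`, is the step of a Morris elementary effect: given a model output y and a trajectory through the input space, the effect attributed to factor i is estimated as

```latex
EE_i = \frac{y(x_1, \ldots, x_i + \Delta_i, \ldots, x_k) - y(x_1, \ldots, x_k)}{\Delta_i}
```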
package org.openmole.plugin.method.sensitivity
import org.openmole.core.context._
import org.openmole.core.workflow.builder._
import org.openmole.core.workflow.task._
//import org.openmole.core.dsl._
object SaltelliAggregation {
val namespace = Namespace("saltelli")
// SensitivityIndices
val firstOrderSI = Val[Array[Double]]("firstOrderSI", namespace = namespace)
val totalOrderSI = Val[Array[Double]]("totalOrderSI", namespace = namespace)
/**
 * Compute the first-order and total-order sensitivity indices from the given
 * output values of a model, following Saltelli et al. 2010, "Variance based
 * sensitivity analysis of model output".
 * fA(j) contains the model output value for the j-th row of matrix A (see paper),
 * fB(j) contains the model output value for the j-th row of B, and
 * fC(i)(j) contains the model output for the j-th row of matrix
 * C^i (the matrix whose columns all come from B except the i-th, which
 * comes from A).
 */
def firstAndTotalOrderIndices(
fA: Seq[Option[Double]],
fB: Seq[Option[Double]],
fC: Seq[Seq[Option[Double]]]): (Seq[Double], Seq[Double]) = {
    val fBSome = fB.collect { case Some(i) ⇒ i }
    val NB = fBSome.size
    val k = fC.size // Number of parameters
    val f02 = math.pow(fBSome.sum / NB.toDouble, 2)
    val varY = fBSome.map(fBj ⇒ math.pow(fBj, 2)).sum / NB.toDouble - f02

    def avgProduct(u: Seq[Option[Double]], v: Seq[Option[Double]]): Double = {
      val prods = (u zip v).collect { case (Some(uj), Some(vj)) ⇒ uj * vj }
      prods.sum / prods.size.toDouble
    }

    val firstOrderEffects = (1 to k).map { i ⇒
      val sumTerms = (fA zip fB zip fC(i - 1)).collect {
        case ((Some(fAj), Some(fBj)), Some(fCij)) ⇒ fBj * (fCij - fAj)
      }
      val N = sumTerms.size
      (sumTerms.sum / N) / varY
    }.toVector

    val totalOrderEffects = (1 to k).map { i ⇒
      val squaredDiff = (fA zip fC(i - 1)).collect {
        case (Some(fAj), Some(fCij)) ⇒ math.pow(fAj - fCij, 2)
      }
      val N = squaredDiff.size
      (squaredDiff.sum / (2.0 * N)) / varY
    }.toVector

    (firstOrderEffects, totalOrderEffects)
}
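The two map blocks above implement, with missing (None) runs dropped pairwise, the Saltelli et al. 2010 estimators:

```latex
S_i \approx \frac{\tfrac{1}{N}\sum_{j=1}^{N} f_B(j)\,\bigl(f_{C^i}(j) - f_A(j)\bigr)}{V(Y)}
\qquad
S_{T_i} \approx \frac{\tfrac{1}{2N}\sum_{j=1}^{N} \bigl(f_A(j) - f_{C^i}(j)\bigr)^2}{V(Y)}
\qquad
V(Y) \approx \frac{1}{N}\sum_{j=1}^{N} f_B(j)^2 - \Bigl(\frac{1}{N}\sum_{j=1}^{N} f_B(j)\Bigr)^2
```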
// def totalOrder(a: Seq[Double], b: Seq[Double], c: Seq[Double]) = {
// val n = a.size
// val bxcAvg = (b zip c map { case (b, c) ⇒ b * c } sum) / n
// val axaAvg = (a map { a ⇒ a * a } sum) / n
// val f0 = (a sum) / n
// 1 - (bxcAvg - math.pow(f0, 2)) / (axaAvg - math.pow(f0, 2))
// }
def apply(inputs: Seq[Val[_]], outputs: Seq[Val[_]])(implicit name: sourcecode.Name, definitionScope: DefinitionScope) =
ClosureTask("SaltelliAggregation") { (context, _, _)
println(context)
val matrixNames: Array[String] =
context(SaltelliSampling.matrixName.array)
// an array of a,b,c for each pair of inputs
def indices(names: Array[String], value: String) = names.zipWithIndex.filter(_._1 == value).map(_._2)
//indices(matrixNames, "a").map()
context /*+
(SaltelliAggregation.firstOrderSI, ???) +
(SaltelliAggregation.totalOrderSI, ???)
*/
}
}
......@@ -53,23 +53,23 @@ class SaltelliSampling(val samples: FromContext[Int], val factors: ScalarOrSeque
def toVariables(
matrix: Array[Array[Double]],
m: String
m: Namespace
): List[Iterable[Variable[_]]] =
matrix.zipWithIndex.map {
  case (l, index) ⇒
def line = ScalarOrSequenceOfDouble.scaled(factors, l).from(context)
Variable(SaltelliSampling.matrixName, m) :: Variable(SaltelliSampling.matrixIndex, index) :: line
def line = ScalarOrSequenceOfDouble.unflatten(factors, l).from(context)
Variable(SaltelliSampling.matrixName, m.toString) :: Variable(SaltelliSampling.matrixIndex, index.toString) :: line
}.toList
def aVariables = toVariables(a, "a")
def bVariables = toVariables(b, "b")
def aVariables = toVariables(a, Namespace("a"))
def bVariables = toVariables(b, Namespace("b"))
def cVariables =
cIndices.zipWithIndex.flatMap {
  case ((f, j, scalar), i) ⇒
val matrixName =
if (scalar) f.prototype.withNamespace(Namespace("c")).name
else f.prototype.withNamespace(Namespace("c", j.toString)).name
if (scalar) Namespace("c", f.prototype.name)
else Namespace("c", j.toString, f.prototype.name)
toVariables(
SaltelliSampling.buildC(i, a, b),
......
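`SaltelliSampling.buildC` itself is not part of this diff; a plausible sketch matching the doc comment of `firstAndTotalOrderIndices` (C^i takes every column from B except the i-th, which comes from A) would be:

```scala
// Hypothetical sketch of SaltelliSampling.buildC (the real definition is not shown here):
// row j of C^i is row j of B with its i-th column replaced by the value from A.
def buildC(i: Int, a: Array[Array[Double]], b: Array[Array[Double]]): Array[Array[Double]] =
  (a zip b).map { case (rowA, rowB) ⇒ rowB.updated(i, rowA(i)) }
```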
......@@ -19,6 +19,7 @@
package org.openmole.plugin.method
import org.openmole.core.context._
import org.openmole.core.expansion.FromContext
import org.openmole.core.outputmanager.OutputManager
import org.openmole.core.workflow.builder.DefinitionScope
import org.openmole.core.workflow.dsl._
......@@ -30,6 +31,8 @@ import org.openmole.core.workflow.tools.ScalarOrSequenceOfDouble
import org.openmole.core.workflow.validation.DataflowProblem._
import org.openmole.core.workflow.validation._
import org.openmole.core.workflow.transition.Slot
import org.openmole.plugin.method.directsampling._
import org.openmole.core.dsl
package object sensitivity {
......@@ -101,5 +104,26 @@ package object sensitivity {
}
def SensitivitySaltelli(
evaluation: Puzzle,
inputs: Seq[ScalarOrSequenceOfDouble[_]],
outputs: Seq[Val[Double]],
samples: FromContext[Int]) = {
val sampling = SaltelliSampling(samples, inputs: _*)
val aggregation = SaltelliAggregation(
inputs = (inputs.map(_.prototype.array) ++ SaltelliSampling.matrix.map(_.array)),
outputs = Seq(
SaltelliAggregation.totalOrderSI.array,
SaltelliAggregation.firstOrderSI.array)
)
DirectSampling(
evaluation = evaluation,
sampling = sampling,
aggregation = aggregation
)
}
}
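A hypothetical end-to-end use of `SensitivitySaltelli`; the model task, the factor-bounds syntax, and all names here are assumptions for illustration, not taken from this commit:

```scala
// Sketch only: `model` is any Puzzle computing o from x1 and x2.
val x1 = Val[Double]
val x2 = Val[Double]
val o = Val[Double]

val saltelli =
  SensitivitySaltelli(
    evaluation = model,
    inputs = Seq(x1 in (0.0, 1.0), x2 in (0.0, 10.0)), // bounded factors, assumed DSL syntax
    outputs = Seq(o),
    samples = 100                                      // lifts to FromContext[Int]
  )
```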
......@@ -46,7 +46,7 @@ sealed class LHS(val samples: FromContext[Int], val factors: ScalarOrSequenceOfD
val s = samples.from(context)
val vectorSize = factors.map(_.size(context)).sum
def values = LHS.lhsValues(vectorSize, s, random())
values.map(v ⇒ ScalarOrSequenceOfDouble.scaled(factors, v).from(context)).toIterator
values.map(v ⇒ ScalarOrSequenceOfDouble.unflatten(factors, v).from(context)).toIterator
}
}
......@@ -44,6 +44,6 @@ sealed class SobolSampling[D](val samples: FromContext[Int], val factors: Scalar
for {
v ← Iterator.continually(sequence.nextVector()).take(s)
} yield ScalarOrSequenceOfDouble.scaled(factors, v.toSeq)(context)
} yield ScalarOrSequenceOfDouble.unflatten(factors, v.toSeq)(context)
}
}