Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

Avoid creating SparseVectors for LOCO #377

Merged
merged 19 commits into from
Aug 21, 2019
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
try sparse vector clone for test fix
  • Loading branch information
gerashegalov committed Aug 5, 2019
commit 35b8b5c824a11d5958f2af72375434cf5ea34d44
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ import com.salesforce.op.stages.impl.feature.TimePeriod
import com.salesforce.op.stages.impl.selector.SelectedModel
import com.salesforce.op.stages.sparkwrappers.specific.OpPredictorWrapperModel
import com.salesforce.op.stages.sparkwrappers.specific.SparkModelConverter._
import com.salesforce.op.utils.spark.RichVector._
import com.salesforce.op.utils.spark.RichVector.RichSparseVector
import com.salesforce.op.utils.spark.{OpVectorColumnHistory, OpVectorMetadata}
import enumeratum.{Enum, EnumEntry}
import org.apache.spark.annotation.Experimental
Expand Down Expand Up @@ -130,16 +130,11 @@ class RecordInsightsLOCO[T <: Model[T]]

private def computeDiff
(
i: Int,
oldInd: Int,
featureSparse: SparseVector,
baseScore: Array[Double]
): Array[Double] = {
val oldVal = featureSparse.update(i, oldInd, 0.0)
val score = modelApply(labelDummy, featureSparse.toOPVector).score
val diffs = baseScore.zip(score).map { case (b, s) => b - s }
featureSparse.update(i, oldInd, oldVal)
diffs
baseScore.zip(score).map { case (b, s) => b - s }
}

private def sumArrays(left: Array[Double], right: Array[Double]): Array[Double] = {
Expand Down Expand Up @@ -225,7 +220,9 @@ class RecordInsightsLOCO[T <: Model[T]]
val zdif = Array.fill(baseScore.length)(0.0)
featureVec match {
case Left(sparse) => (0 until sparse.size, sparse.indices).zipped
.map { case (i, oldInd) => (i, oldInd, computeDiff(i, oldInd, sparse, baseScore)) }
.map { case (i, oldInd) =>
(i, oldInd, computeDiff(sparse.copy.updated(i, oldInd, 0.0), baseScore))
}
case Right(zeroeIndices) => (0 until zeroeIndices.length, zeroeIndices).zipped
.map { case (i, oldInd) => (i + offset, oldInd, zdif) }
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -134,20 +134,11 @@ object RichVector {
}

implicit class RichSparseVector(val v: SparseVector) extends AnyVal {
def foreachNonZeroIndexedValue(f: (Int, Int, Double) => Unit): Unit = {
(0 until v.indices.length)
.withFilter(v.values(_) != 0.0)
.foreach(i => f(i, v.indices(i), v.values(i)))
}

def update(index: Int, indexVal: Int, value: Double): Double = {
def updated(index: Int, indexVal: Int, value: Double): SparseVector = {
require(v.indices(index) == indexVal,
s"Invalid index: indices($index)==${v.indices(index)}, expected: $indexVal")
val oldVal = v.values(index)
v.values(index) = value
oldVal
v
}

def toIndexedArray: Array[(Int, Double)] = v.indices.zip(v.values)
}
}