Commit b2aafad

Merge branch 'refactor-tfjs-api' into max-operator

cupertank committed Jul 12, 2023
2 parents d90c38e + 252a1c3

Showing 93 changed files with 1,357 additions and 431 deletions.
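
Most of the modified operators below (Attention, QAttention, GRU, LSTM, DynamicQuantizeLSTM, MatMulInteger) share one refactoring: the lookup of pre-transformed tensors via contexts.graph!!.getOrNullValue(optName(...)) plus an unchecked cast is replaced by checking the tensor's own name with GraphOptimizer.Companion.isOpt and falling back to the corresponding context rule's prepare* helper. The following is a minimal standalone sketch of that pattern, built on invented stand-ins: Tensor, isOpt, prepareWeights, and the "opt:" name prefix below are not the real KInference API.

// Minimal standalone sketch of the "reuse if already optimized, otherwise prepare" pattern.
// Tensor, isOpt and prepareWeights are simplified stand-ins, not KInference types.
data class Tensor(val name: String, val data: FloatArray)

// Assumption for this sketch only: tensors already rewritten by the graph optimizer
// carry a recognizable name prefix.
fun isOpt(name: String): Boolean = name.startsWith("opt:")

fun prepareWeights(weights: Tensor): Tensor =
    Tensor("opt:${weights.name}", weights.data) // placeholder for the real packing/transposition

fun resolveWeights(weights: Tensor): Tensor =
    // Pattern used throughout this commit: skip preparation when the optimizer already
    // produced this tensor, otherwise prepare it on the fly.
    weights.takeIf { isOpt(it.name) } ?: prepareWeights(weights)

fun main() {
    val raw = Tensor("attention.weights", floatArrayOf(1f, 2f, 3f))
    val once = resolveWeights(raw)   // prepared on the fly
    val twice = resolveWeights(once) // already prepared, returned as-is
    println(once === twice)          // true: the prepared tensor is reused, not rebuilt
}

Optional inputs such as bias and peepholes get the same treatment through ?.let { tensor -> tensor.takeIf { isOpt(it.name) } ?: prepare(tensor) }, which also drops the as KITensor / as KITensor? casts the old lookups required.
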
@@ -92,9 +92,10 @@ object KIOperatorFactory : OperatorFactory<KIONNXData<*>> {
"Gelu" -> Gelu(name, version, attributes, inputs, outputs)
"Gemm" -> Gemm(name, version, attributes, inputs, outputs)
"Greater" -> Greater(name, version, attributes, inputs, outputs)
"GRU" -> GRU(name, version, attributes, inputs, outputs)
"Hardmax" -> Hardmax(name, version, attributes, inputs, outputs)
"IsInf" -> IsInf(name, version, attributes, inputs, outputs)
"GRU" -> GRU(name, version, attributes, inputs, outputs)
"IsNaN" -> IsNaN(name, version, attributes, inputs, outputs)
"Identity" -> Identity(name, version, attributes, inputs, outputs)
"If" -> If(name, version, attributes, inputs, outputs)
"LayerNormalization" -> LayerNormalization(name, version, attributes, inputs, outputs)
@@ -140,6 +141,7 @@ object KIOperatorFactory : OperatorFactory<KIONNXData<*>> {
"TreeEnsembleRegressor" -> TreeEnsembleRegressor(name, version, attributes, inputs, outputs)
"Unsqueeze" -> Unsqueeze(name, version, attributes, inputs, outputs)
"Where" -> Where(name, version, attributes, inputs, outputs)
"Xor" -> Xor(name, version, attributes, inputs, outputs)
"ZipMap" -> ZipMap(name, version, attributes, inputs, outputs)
else -> error("Unsupported operator: $opType")
} as Operator<KIONNXData<*>, KIONNXData<*>>
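
The factory hunks above only register the new branches; as a reminder of the shape of this dispatch, here is a simplified standalone sketch of a string-keyed operator factory. Operator, the constructors, and the names used below are stand-ins for illustration, not the real KIOperatorFactory types.

// Simplified sketch of a string-keyed operator factory like KIOperatorFactory above.
// The interface and constructors are stand-ins for this example only.
interface Operator {
    val name: String
}

class GRU(override val name: String) : Operator
class IsNaN(override val name: String) : Operator
class Xor(override val name: String) : Operator

fun create(opType: String, name: String): Operator = when (opType) {
    "GRU" -> GRU(name)
    "IsNaN" -> IsNaN(name)
    "Xor" -> Xor(name)
    else -> error("Unsupported operator: $opType") // same failure mode as the factory above
}

fun main() {
    println(create("Xor", "xor_0").name) // xor_0
    // create("Foo", "foo_0") would throw IllegalStateException("Unsupported operator: Foo")
}
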
@@ -13,7 +13,7 @@ import io.kinference.ndarray.arrays.tiled.FloatTiledArray
import io.kinference.ndarray.extensions.allocateNDArray
import io.kinference.ndarray.extensions.dotTransposedWithAlpha
import io.kinference.operator.*
import io.kinference.optimizer.GraphOptimizer.Companion.optName
import io.kinference.optimizer.GraphOptimizer.Companion.isOpt
import io.kinference.protobuf.message.AttributeProto
import io.kinference.protobuf.message.TensorProto
import kotlinx.coroutines.coroutineScope
@@ -265,12 +265,11 @@ class AttentionVer1(name: String, attributes: Map<String, Attribute<Any>>, input
override suspend fun <D : ONNXData<*, *>> apply(contexts: Contexts<D>, inputs: List<KITensor?>): List<KITensor?> {
val input = inputs[0]!!
val weights = inputs[1]!!
val preparedWeights = (contexts.graph!!.getOrNullValue(optName(weights.name))
?: AttentionContextRule.prepareWeights(weights, numHeads)) as KITensor

val preparedWeights = weights.takeIf { isOpt(it.name) } ?: AttentionContextRule.prepareWeights(weights, numHeads)

val bias = inputs[2]!!
val preparedBias = (contexts.graph!!.getOrNullValue(optName(bias.name))
?: AttentionContextRule.prepareBias(bias, numHeads)) as KITensor
val preparedBias = bias.takeIf { isOpt(it.name) } ?: AttentionContextRule.prepareBias(bias, numHeads)

val maskIndices = inputs.elementAtOrNull(3)?.data as IntNDArray?
val past = inputs.elementAtOrNull(4)?.data
@@ -10,7 +10,7 @@ import io.kinference.graph.Contexts
import io.kinference.ndarray.arrays.*
import io.kinference.ndarray.extensions.tryDequantize
import io.kinference.operator.*
import io.kinference.optimizer.GraphOptimizer.Companion.optName
import io.kinference.optimizer.GraphOptimizer.Companion.isOpt
import io.kinference.protobuf.message.AttributeProto
import io.kinference.protobuf.message.TensorProto

@@ -106,14 +106,12 @@ class QAttentionVer1(name: String, attributes: Map<String, Attribute<Any>>, inpu
val weights = inputs[1]!!
val weightsScale = inputs[4]!!
val weightsZeroPoint = inputs.getOrNull(7)

val preparedWeights = (contexts.graph!!.getOrNullValue(optName(weights.name))
?: QAttentionContextRule.prepareWeights(weights, weightsScale, weightsZeroPoint, numHeads)) as KITensor
val preparedWeights = weights.takeIf { isOpt(it.name) }
?: QAttentionContextRule.prepareWeights(weights, weightsScale, weightsZeroPoint, numHeads)

val bias = inputs[2]!!

val preparedBias = (contexts.graph!!.getOrNullValue(optName(bias.name))
?: AttentionContextRule.prepareBias(bias, numHeads)) as KITensor
val preparedBias = bias.takeIf { isOpt(it.name) }
?: AttentionContextRule.prepareBias(bias, numHeads)

val maskIndices = inputs.getOrNull(5)?.data as IntNDArray?
val past = inputs.getOrNull(8)?.data as NumberNDArrayCore?
@@ -10,7 +10,7 @@ import io.kinference.graph.Contexts
import io.kinference.ndarray.arrays.IntNDArray
import io.kinference.ndarray.arrays.NumberNDArrayCore
import io.kinference.operator.*
import io.kinference.optimizer.GraphOptimizer.Companion.optName
import io.kinference.optimizer.GraphOptimizer.Companion.isOpt
import io.kinference.protobuf.message.AttributeProto
import io.kinference.protobuf.message.TensorProto

@@ -99,14 +99,16 @@ class GRUVer7(
val input = inputs[0]!!

val weights = inputs[1]!!
val preparedWeights = (contexts.graph!!.getOrNullValue(optName(weights.name)) ?: GRUContextRule.prepareWeights(weights))
val preparedWeights = weights.takeIf { isOpt(it.name) } ?: GRUContextRule.prepareWeights(weights)

val recurrentWeights = inputs[2]!!
val preparedRecurrentWeights = (contexts.graph!!.getOrNullValue(optName(recurrentWeights.name))
?: GRUContextRule.prepareWeights(recurrentWeights)) as KITensor
val preparedRecurrentWeights = recurrentWeights.takeIf { isOpt(it.name) }
?: GRUContextRule.prepareWeights(recurrentWeights)

val bias = inputs.getOrNull(3)
val preparedBias = bias?.let { contexts.graph!!.getOrNullValue(optName(it.name)) ?: GRUContextRule.prepareBias(it) }
val preparedBias = bias?.let { tensor ->
tensor.takeIf { isOpt(it.name) } ?: GRUContextRule.prepareBias(tensor)
}

val sequenceLens = inputs.getOrNull(4)
val initialHiddenState = inputs.getOrNull(5)
@@ -10,7 +10,7 @@ import io.kinference.graph.Contexts
import io.kinference.ndarray.arrays.IntNDArray
import io.kinference.ndarray.arrays.NumberNDArrayCore
import io.kinference.operator.*
import io.kinference.optimizer.GraphOptimizer.Companion.optName
import io.kinference.optimizer.GraphOptimizer.Companion.isOpt
import io.kinference.protobuf.message.AttributeProto
import io.kinference.protobuf.message.TensorProto

@@ -101,24 +101,23 @@ class LSTMVer7(
val inputAsLSTMInput = DefaultLSTMInput(input.data as NumberNDArrayCore)

val weights = inputs[1]!!
val preparedWeights = (contexts.graph!!.getOrNullValue(optName(weights.name))
?: LSTMContextRule.prepareWeights(weights)) as KITensor
val preparedWeights = weights.takeIf { isOpt(it.name) } ?: LSTMContextRule.prepareWeights(weights)
val weightsAsLSTMWeights = DefaultLSTMWeights(preparedWeights.data as NumberNDArrayCore)

val recurrentWeights = inputs[2]!!
val preparedRecurrentWeights = (contexts.graph!!.getOrNullValue(optName(recurrentWeights.name))
?: LSTMContextRule.prepareWeights(recurrentWeights)) as KITensor
val preparedRecurrentWeights = recurrentWeights.takeIf { isOpt(it.name) }
?: LSTMContextRule.prepareWeights(recurrentWeights)
val recurrentWeightsAsLSTMWeights = DefaultLSTMWeights(preparedRecurrentWeights.data as NumberNDArrayCore)

val bias = inputs.getOrNull(3)
val preparedBias = bias?.let {
contexts.graph!!.getOrNullValue(optName(it.name)) ?: LSTMContextRule.prepareBias(it)
} as KITensor?
val preparedBias = bias?.let { tensor ->
tensor.takeIf { isOpt(it.name) } ?: LSTMContextRule.prepareBias(tensor)
}

val peepholes = inputs.getOrNull(7)
val preparedPeepholes = peepholes?.let {
contexts.graph!!.getOrNullValue(optName(it.name)) ?: LSTMContextRule.preparePeepholes(it)
} as KITensor?
val preparedPeepholes = peepholes?.let { tensor ->
tensor.takeIf { isOpt(it.name) } ?: LSTMContextRule.preparePeepholes(tensor)
}

val sequenceLens = inputs.getOrNull(4)
val initialState = inputs.getOrNull(5)
@@ -0,0 +1,58 @@
package io.kinference.core.operators.logical

import io.kinference.attribute.Attribute
import io.kinference.core.data.tensor.KITensor
import io.kinference.core.data.tensor.asTensor
import io.kinference.data.ONNXData
import io.kinference.graph.Contexts
import io.kinference.ndarray.arrays.BooleanNDArray
import io.kinference.operator.*
import io.kinference.protobuf.message.TensorProto

sealed class Xor(
    name: String,
    info: OperatorInfo,
    attributes: Map<String, Attribute<Any>>,
    inputs: List<String>,
    outputs: List<String>
) : Operator<KITensor, KITensor>(name, info, attributes, inputs, outputs) {
    companion object {
        private val DEFAULT_VERSION = VersionInfo(sinceVersion = 7)

        operator fun invoke(name: String, version: Int?, attributes: Map<String, Attribute<Any>>, inputs: List<String>, outputs: List<String>): Xor {
            return when (version ?: DEFAULT_VERSION.sinceVersion) {
                in XorVer7.VERSION.asRange() -> XorVer7(name, attributes, inputs, outputs)
                else -> error("Unsupported version of Xor operator: $version")
            }
        }
    }
}

class XorVer7(
    name: String,
    attributes: Map<String, Attribute<Any>>,
    inputs: List<String>,
    outputs: List<String>
) : Xor(name, INFO, attributes, inputs, outputs) {
    companion object {
        private val TYPE_CONSTRAINTS = setOf(TensorProto.DataType.BOOL)

        private val INPUTS_INFO = listOf(
            IOInfo(0, TYPE_CONSTRAINTS, "A", optional = false),
            IOInfo(1, TYPE_CONSTRAINTS, "B", optional = false)
        )

        private val OUTPUTS_INFO = listOf(IOInfo(0, TYPE_CONSTRAINTS, "C", optional = false))

        internal val VERSION = VersionInfo(sinceVersion = 7)
        private val INFO = OperatorInfo("Xor", emptySet(), INPUTS_INFO, OUTPUTS_INFO, VERSION, OperatorInfo.DEFAULT_DOMAIN)
    }

    override suspend fun <D : ONNXData<*, *>> apply(contexts: Contexts<D>, inputs: List<KITensor?>): List<KITensor?> {
        val left = inputs[0]!!.data as BooleanNDArray
        val right = inputs[1]!!.data as BooleanNDArray

        val ans = left xor right
        return listOf(ans.asTensor("C"))
    }
}
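
Both new operators (Xor above, IsNaN below) resolve their implementation through the same companion invoke idiom: a missing version falls back to DEFAULT_VERSION.sinceVersion and is then matched against each implementation's VERSION.asRange(). The sketch below reproduces that idiom with a stand-in VersionInfo; treating the range as open-ended is an assumption made for this example, not a claim about io.kinference.operator.VersionInfo.

// Stand-in for io.kinference.operator.VersionInfo, assumed here to describe an
// open-ended "since this opset" range.
data class VersionInfo(val sinceVersion: Int, val untilVersion: Int = Int.MAX_VALUE) {
    fun asRange(): IntRange = sinceVersion until untilVersion
}

private val DEFAULT_VERSION = VersionInfo(sinceVersion = 7)

// Mirrors Xor.Companion.invoke: pick an implementation by opset version,
// defaulting to the operator's base version when none is given.
fun resolveXorImpl(version: Int?): String =
    when (version ?: DEFAULT_VERSION.sinceVersion) {
        in DEFAULT_VERSION.asRange() -> "XorVer7"
        else -> error("Unsupported version of Xor operator: $version")
    }

fun main() {
    println(resolveXorImpl(null)) // XorVer7: falls back to sinceVersion = 7
    println(resolveXorImpl(13))   // XorVer7: 13 is still inside the version range
    // resolveXorImpl(6) would throw, since opset 6 predates the operator's sinceVersion
}
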
@@ -8,7 +8,7 @@ import io.kinference.data.ONNXData
import io.kinference.graph.Contexts
import io.kinference.ndarray.arrays.*
import io.kinference.operator.*
import io.kinference.optimizer.GraphOptimizer.Companion.optName
import io.kinference.optimizer.GraphOptimizer.Companion.isOpt
import io.kinference.protobuf.message.TensorProto

sealed class MatMulInteger(name: String, info: OperatorInfo, attributes: Map<String, Attribute<Any>>, inputs: List<String>, outputs: List<String>) : Operator<KITensor, KITensor>(name, info, attributes, inputs, outputs) {
@@ -52,10 +52,10 @@ class MatMulIntegerVer10(name: String, attributes: Map<String, Attribute<Any>>,
val firstZero = inputs.getOrNull(2)
val secondZero = inputs.getOrNull(3)

val firstPrepared = (contexts.graph!!.getOrNullValue(optName(first.name))
?: MatMulIntegerContextRule.prepareTensor(first, firstZero)) as KITensor
val secondPrepared = (contexts.graph!!.getOrNullValue(optName(second.name))
?: MatMulIntegerContextRule.prepareTensor(second, secondZero)) as KITensor
val firstPrepared = first.takeIf { isOpt(it.name) }
?: MatMulIntegerContextRule.prepareTensor(first, firstZero)
val secondPrepared = second.takeIf { isOpt(it.name) }
?: MatMulIntegerContextRule.prepareTensor(second, secondZero)

val output = (firstPrepared.data as NumberNDArrayCore)
.matmul(secondPrepared.data as NumberNDArrayCore)
@@ -10,7 +10,7 @@ import io.kinference.data.ONNXData
import io.kinference.graph.Contexts
import io.kinference.ndarray.arrays.*
import io.kinference.operator.*
import io.kinference.optimizer.GraphOptimizer.Companion.optName
import io.kinference.optimizer.GraphOptimizer.Companion.isOpt
import io.kinference.protobuf.message.AttributeProto
import io.kinference.protobuf.message.TensorProto

@@ -89,22 +89,21 @@ class DynamicQuantizeLSTMVer1(name: String, attributes: Map<String, Attribute<An
val inputAsLSTMInput = QuantizedLSTMInput.create(input)

val weights = inputs[1]!!
val preparedWeights = (contexts.graph!!.getOrNullValue(optName(weights.name))
?: LSTMContextRule.prepareWeights(weights)) as KITensor
val preparedWeights = weights.takeIf { isOpt(it.name) } ?: LSTMContextRule.prepareWeights(weights)

val recurrentWeights = inputs[2]!!
val preparedRecurrentWeights = (contexts.graph!!.getOrNullValue(optName(recurrentWeights.name))
?: LSTMContextRule.prepareWeights(recurrentWeights)) as KITensor
val preparedRecurrentWeights = recurrentWeights.takeIf { isOpt(it.name) }
?: LSTMContextRule.prepareWeights(recurrentWeights)

val bias = inputs.getOrNull(3)
val preparedBias = bias?.let {
contexts.graph!!.getOrNullValue(optName(it.name)) ?: LSTMContextRule.prepareBias(it)
} as KITensor?
val preparedBias = bias?.let { tensor ->
tensor.takeIf { isOpt(it.name) } ?: LSTMContextRule.prepareBias(tensor)
}

val peepholes = inputs.getOrNull(7)
val preparedPeepholes = peepholes?.let {
contexts.graph!!.getOrNullValue(optName(it.name)) ?: LSTMContextRule.preparePeepholes(it)
} as KITensor?
val preparedPeepholes = peepholes?.let { tensor ->
tensor.takeIf { isOpt(it.name) } ?: LSTMContextRule.preparePeepholes(tensor)
}

val sequenceLens = inputs.getOrNull(4)
val initialState = inputs.getOrNull(5)
@@ -0,0 +1,62 @@
package io.kinference.core.operators.tensor

import io.kinference.attribute.Attribute
import io.kinference.core.data.tensor.KITensor
import io.kinference.core.data.tensor.asTensor
import io.kinference.data.ONNXData
import io.kinference.graph.Contexts
import io.kinference.ndarray.arrays.*
import io.kinference.ndarray.extensions.isNaN.isNaN
import io.kinference.operator.*
import io.kinference.primitives.types.DataType

sealed class IsNaN(name: String, info: OperatorInfo, attributes: Map<String, Attribute<Any>>, inputs: List<String>, outputs: List<String>) :
    Operator<KITensor, KITensor>(name, info, attributes, inputs, outputs) {
    companion object {
        private val DEFAULT_VERSION = VersionInfo(sinceVersion = 9)

        operator fun invoke(name: String, version: Int?, attributes: Map<String, Attribute<Any>>, inputs: List<String>, outputs: List<String>) =
            when (version ?: DEFAULT_VERSION.sinceVersion) {
                in IsNaNVer9.VERSION.asRange() -> IsNaNVer9(name, attributes, inputs, outputs)
                else -> error("Unsupported version of IsNaN operator: $version")
            }
    }
}

class IsNaNVer9(
    name: String,
    attributes: Map<String, Attribute<Any>>,
    inputs: List<String>,
    outputs: List<String>
) : IsNaN(name, INFO, attributes, inputs, outputs) {
    companion object {
        private val ATTRIBUTES_INFO = emptyList<AttributeInfo>()

        private val INPUTS_INFO = listOf(
            IOInfo(0, PRIMITIVE_DATA_TYPES, "X", differentiable = true, optional = false)
        )

        private val OUTPUTS_INFO = listOf(
            IOInfo(0, PRIMITIVE_DATA_TYPES, "Y", differentiable = true, optional = false)
        )

        // Implements the latest version of the operator; earlier opset versions are backward compatible, so this implementation covers them as well
        internal val VERSION = VersionInfo(sinceVersion = 9)
        private val INFO = OperatorInfo("IsNaN", ATTRIBUTES_INFO, INPUTS_INFO, OUTPUTS_INFO, VERSION, OperatorInfo.DEFAULT_DOMAIN)
    }

    override suspend fun <D : ONNXData<*, *>> apply(contexts: Contexts<D>, inputs: List<KITensor?>): List<KITensor?> {
        val input = inputs[0]!!.data

        val output = when (input.type) {
            DataType.FLOAT -> (input as FloatNDArray).isNaN()
            DataType.DOUBLE -> (input as DoubleNDArray).isNaN()
            else -> error("Unsupported type")
        }

        return listOf(output.asTensor("Y"))
    }
}
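
What IsNaNVer9.apply computes is an elementwise NaN mask over a float or double tensor, much as XorVer7 above computes an elementwise xor. A simplified sketch on flat Kotlin arrays follows; the real FloatNDArray.isNaN() extension works on tiled NDArray storage and preserves shape, which this sketch omits.

// Elementwise NaN mask over a flat array: a simplified stand-in for the
// FloatNDArray.isNaN()/DoubleNDArray.isNaN() extensions used by IsNaNVer9 above.
fun isNaNMask(input: FloatArray): BooleanArray =
    BooleanArray(input.size) { i -> input[i].isNaN() }

fun main() {
    val x = floatArrayOf(1.0f, Float.NaN, Float.POSITIVE_INFINITY, 0.0f / 0.0f)
    println(isNaNMask(x).toList()) // [false, true, false, true]
}
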


@@ -0,0 +1,49 @@
package io.kinference.operators.logical

import io.kinference.KITestEngine.KIAccuracyRunner
import io.kinference.utils.TestRunner
import kotlin.test.Test

class XorTest {
    private fun getTargetPath(dirName: String) = "xor/$dirName/"

    @Test
    fun test_xor_2d() = TestRunner.runTest {
        KIAccuracyRunner.runFromResources(getTargetPath("test_xor2d"))
    }

    @Test
    fun test_xor_3d() = TestRunner.runTest {
        KIAccuracyRunner.runFromResources(getTargetPath("test_xor3d"))
    }

    @Test
    fun test_xor_4d() = TestRunner.runTest {
        KIAccuracyRunner.runFromResources(getTargetPath("test_xor4d"))
    }

    @Test
    fun test_xor_broadcast_3v1d() = TestRunner.runTest {
        KIAccuracyRunner.runFromResources(getTargetPath("test_xor_bcast3v1d"))
    }

    @Test
    fun test_xor_broadcast_3v2d() = TestRunner.runTest {
        KIAccuracyRunner.runFromResources(getTargetPath("test_xor_bcast3v2d"))
    }

    @Test
    fun test_xor_broadcast_4v2d() = TestRunner.runTest {
        KIAccuracyRunner.runFromResources(getTargetPath("test_xor_bcast4v2d"))
    }

    @Test
    fun test_xor_broadcast_4v3d() = TestRunner.runTest {
        KIAccuracyRunner.runFromResources(getTargetPath("test_xor_bcast4v3d"))
    }

    @Test
    fun test_xor_broadcast_4v4d() = TestRunner.runTest {
        KIAccuracyRunner.runFromResources(getTargetPath("test_xor_bcast4v4d"))
    }
}
@@ -0,0 +1,14 @@
package io.kinference.operators.operations

import io.kinference.KITestEngine.KIAccuracyRunner
import io.kinference.utils.TestRunner
import kotlin.test.Test

class IsNaNTest {
    private fun getTargetPath(dirName: String) = "isnan/$dirName/"

    @Test
    fun test_isnan() = TestRunner.runTest {
        KIAccuracyRunner.runFromResources(getTargetPath("test_isnan"))
    }
}