Package-level declarations
Types
Link copied to clipboard
data class KanConfig(val inputDim: Int, val outputDim: Int, val gridSize: Int = 16, val degree: Int = 3, val useBias: Boolean = true, val useResidual: Boolean = false, val regularization: KanRegularization = KanRegularization(), val gridMin: Float = 0.0f, val gridMax: Float = (PI / 2).toFloat())
Configuration for a Kolmogorov–Arnold Network layer.
Link copied to clipboard
Link copied to clipboard
class KanLayer<T : DType, V>(val config: KanConfig, val baseActivation: (Tensor<T, V>) -> Tensor<T, V>, initMixingWeights: Tensor<T, V>, initBasis: Tensor<T, V>, initBias: Tensor<T, V>?, val name: String) : Module<T, V>, ModuleParameters<T, V>
Stub implementation for a Kolmogorov–Arnold Network layer.
Link copied to clipboard
Simple regularization hints for future spline/mixing penalties.
Functions
Link copied to clipboard
fun <T : DType, V> createAkn(executionContext: ExecutionContext, dtype: KClass<T>, inputDim: Int, outputDim: Int, gridSize: Int = 16, degree: Int = 3, useBias: Boolean = true, useResidual: Boolean = false, name: String = "akn", baseActivation: (Tensor<T, V>) -> Tensor<T, V> = { it }, weightsInit: WeightsScope<T, V>.(Shape) -> Tensor<T, V> = { randn(std = 0.02f) }, basisInit: WeightsScope<T, V>.(Shape) -> Tensor<T, V> = { uniform(min = -0.5f, max = 0.5f) }, biasInit: BiasScope<T, V>.(Shape) -> Tensor<T, V> = { zeros() }): Akn<T, V>
Factory to create an AKN module directly (without the kanLayer DSL helper). Mirrors defaults and initializer hooks used by the DSL.
Link copied to clipboard
inline fun <T : DType, V> NeuralNetworkDsl<T, V>.kanLayer(outputDim: Int, gridSize: Int = 16, degree: Int = 3, useBias: Boolean = true, useResidual: Boolean = false, id: String = "", noinline baseActivation: (Tensor<T, V>) -> Tensor<T, V> = { it }, content: KAN<T, V>.() -> Unit = {})
Adds a KAN layer to the existing network DSL.