module LinearRegressionWithRegularization
open MathNet.Numerics.LinearAlgebra
/// Regularized cost: J(θ) = 1/(2m) * (‖Xθ - y‖² + λ * Σ θⱼ² for j ≥ 1); the intercept θ.[0] is not penalized.
let J (X, y) λ (θ: Vector<float>) =
    assert (Matrix.rowCount X = Vector.length y)
    // Prepend a column of ones so θ.[0] acts as the intercept term.
    let X = X.InsertColumn(0, DenseVector.create (Matrix.rowCount X) 1.)
    let m = Matrix.rowCount X |> float
    // Copy of θ with the intercept zeroed, so it is excluded from the regularization penalty.
    let θforReg = θ.Clone()
    θforReg.[0] <- 0.
    let errors = X * θ - y
    1. / (2. * m) * ((errors * errors) + λ * (θforReg * θforReg))
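
// A hand-checked sketch of the cost (illustrative values, not from the original repository):
// with X = matrix [[1.]; [2.]], y = vector [1.; 2.] and θ = vector [0.; 1.], the fit is exact,
// so the error term vanishes and J (X, y) λ θ reduces to λ * 1² / (2 * 2) = λ / 4.
// Only θ.[1] contributes to the penalty; the intercept θ.[0] never does.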
/// Gradient descent with L2 regularization; the supplied iteration strategy decides when to stop.
let innerGradientDescent iterationFunction α maxIterations (λ: float) (X, y) =
    assert (Matrix.rowCount X = Vector.length y)
    // Prepend a column of ones so θ.[0] acts as the intercept term.
    let X = X.InsertColumn(0, DenseVector.create (Matrix.rowCount X) 1.)
    let m = Matrix.rowCount X |> float
    let iteration (θ: Vector<float>) =
        // The intercept is excluded from the regularization term.
        let θforReg = θ.Clone()
        θforReg.[0] <- 0.
        let errors = X.Transpose() * (X * θ - y)
        // Update rule: θ := θ - (α/m) * Xᵀ(Xθ - y) - (αλ/m) * θforReg
        // (the regularization term is also scaled by the learning rate α).
        θ - (α / m) * errors - (α * λ / m) * θforReg
    // Start from the zero vector and apply the iteration strategy.
    DenseVector.create (Matrix.columnCount X) 0. |> iterationFunction iteration maxIterations
let gradientDescent α = innerGradientDescent Iteration.iterateUntilConvergence α
let gradientDescentWithIntermediateResults α = innerGradientDescent Iteration.iterateUntilConvergenceWithIntermediateResults α
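
// The Iteration module is assumed to live elsewhere in this repository. From the call site above,
// its functions appear to take an iteration step, a maximum iteration count and an initial vector,
// i.e. roughly (Vector<float> -> Vector<float>) -> int -> Vector<float> -> 'Result; this is an
// inference from usage, not a confirmed signature.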
/// Closed-form ridge solution: θ = (XᵀX + λL)⁻¹ Xᵀ y, where L is the identity matrix with its
/// intercept entry zeroed so that θ.[0] is not penalized (consistent with J and gradient descent above).
let normalEquation λ (X, y) =
    assert (Matrix.rowCount X = Vector.length y)
    // Prepend a column of ones so θ.[0] acts as the intercept term.
    let X = X.InsertColumn(0, DenseVector.create (Matrix.rowCount X) 1.)
    let X' = X.Transpose()
    let L = DenseMatrix.identity<float> X.ColumnCount
    L.[0, 0] <- 0.
    Matrix.inverse (X' * X + λ * L) * X' * y
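
// Usage sketch (a minimal, illustrative example: the data, learning rate, iteration count and λ
// below are made-up values, and gradientDescent relies on the companion Iteration module):
//
//   let X = matrix [ [ 1.0 ]; [ 2.0 ]; [ 3.0 ] ]
//   let y = vector [ 2.1; 3.9; 6.2 ]
//   let θClosedForm = normalEquation 0.1 (X, y)
//   let θIterative  = gradientDescent 0.01 10000 0.1 (X, y)
//
// Both calls return a parameter vector whose first element is the intercept and whose remaining
// elements are the weights for the original feature columns.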