-
Notifications
You must be signed in to change notification settings - Fork 0
/
gradient_descent.rb
60 lines (45 loc) · 1.33 KB
/
gradient_descent.rb
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
require 'csv'
# Simple linear regression via batch gradient descent.
# Fits the line y = m*x + b to (x, y) points read from a CSV file
# whose rows are "x,y" pairs.
class GradientDescent
  # Runs the full fit: loads the points, reports the starting error,
  # performs the descent, and reports the final fit.
  #
  # file:       path to the CSV of "x,y" rows (default: 'data.csv')
  # iterations: number of gradient steps to take
  # rate:       learning rate (gradient step size)
  # b:, m:      initial intercept and slope
  #
  # Returns [b, m], the fitted intercept and slope.
  # Raises ArgumentError if the file contains no data points.
  def execute(file: 'data.csv', iterations: 1000, rate: 0.0001, b: 0.0, m: 0.0)
    points = { x: [], y: [] }
    CSV.foreach(file) do |row|
      points[:x] << row[0].to_f
      points[:y] << row[1].to_f
    end
    # Guard against an empty file: compute_error would divide by zero.
    raise ArgumentError, "no data points in #{file}" if points[:x].empty?

    init_e = compute_error(b, m, points)
    puts "Starting at b = #{b}, m = #{m}, error = #{init_e}"
    b, m = gradient_descent(points, b, m, rate, iterations)
    e = compute_error(b, m, points)
    puts "After #{iterations} iterations, b = #{b}, m = #{m}, error = #{e}"
    [b, m]
  end

  private

  # Applies `iterations` gradient steps and returns the final [b, m].
  def gradient_descent(points, b, m, rate, iterations)
    iterations.times do
      b, m = step_gradient(points, b, m, rate)
    end
    [b, m]
  end

  # One batch-gradient step on the mean-squared-error loss.
  # Returns the updated [b, m].
  def step_gradient(points, b, m, rate)
    b_gradient = 0.0
    m_gradient = 0.0
    length = points[:x].size
    points[:x].zip(points[:y]).each do |x, y|
      # The residual is shared by both partial derivatives; compute it once.
      residual = y - ((m * x) + b)
      b_gradient += -(2.0 / length) * residual
      m_gradient += -(2.0 / length) * x * residual
    end
    [b - (rate * b_gradient), m - (rate * m_gradient)]
  end

  # Mean squared error of the line y = m*x + b over the points.
  def compute_error(b, m, points)
    total_error = points[:x].zip(points[:y]).sum do |x, y|
      (y - (m * x + b))**2
    end
    total_error / points[:x].size
  end
end
# Script entry point: fit the default data.csv with default settings.
regressor = GradientDescent.new
regressor.execute