linear regression(one variable).py
import numpy as np


def cost_function(X, Y, w, b):
    """Mean squared error cost J(w, b) = (1 / 2m) * sum((w*x_i + b) - y_i)^2."""
    m = X.shape[0]
    cost = 0
    for i in range(m):
        cost += ((w * X[i] + b) - Y[i]) ** 2
    # Divide by 2m. The original `cost/2*size` computed (cost/2)*size due to
    # operator precedence, which scales the cost by m^2 instead of normalizing it.
    total_cost = cost / (2 * m)
    return total_cost


def gradient(X, Y, w, b):
    """Partial derivatives of the cost with respect to w and b."""
    m = X.shape[0]
    dJ_dw = 0
    dJ_db = 0
    for i in range(m):
        error = w * X[i] + b - Y[i]
        dJ_dw += error * X[i]
        dJ_db += error
    dJ_dw /= m
    dJ_db /= m
    return dJ_dw, dJ_db


def gradient_descent(X, Y, w, b, alpha, num_iters):
    """Run batch gradient descent, recording parameters and cost at every step."""
    w_values = []
    b_values = []
    cost_values = []
    iter_values = []
    for i in range(num_iters):
        dJ_dw, dJ_db = gradient(X, Y, w, b)
        w = w - alpha * dJ_dw  # update both parameters from the same gradient
        b = b - alpha * dJ_db
        cost = cost_function(X, Y, w, b)
        w_values.append(w)
        b_values.append(b)
        cost_values.append(cost)
        iter_values.append(i)
    return w_values, b_values, cost_values, iter_values


X = np.asarray([1, 2])
Y = np.asarray([300, 500])
fw, fb, fcost, fiter = gradient_descent(X, Y, 0, 0, 0.1, 907)
for i in range(len(fw)):
    print(f"w,b = ({fw[i]},{fb[i]}) ; iteration = {fiter[i]} ; cost function = {fcost[i]}")
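The per-sample Python loops above can be collapsed with NumPy broadcasting. Below is a minimal vectorized sketch of the same batch gradient-descent update, not part of the original script; the function name gradient_descent_vectorized and its default arguments (matching the alpha and iteration count used above) are illustrative assumptions.

def gradient_descent_vectorized(X, Y, w=0.0, b=0.0, alpha=0.1, num_iters=907):
    # Hypothetical vectorized variant of the loop-based version above.
    m = X.shape[0]
    for _ in range(num_iters):
        error = w * X + b - Y          # residuals for all samples at once
        w -= alpha * (error @ X) / m   # dJ/dw = (1/m) * sum(error * x)
        b -= alpha * error.sum() / m   # dJ/db = (1/m) * sum(error)
    return w, b

w_vec, b_vec = gradient_descent_vectorized(X.astype(float), Y.astype(float))
print(w_vec, b_vec)

With the two training points (1, 300) and (2, 500), both versions should converge toward the exact interpolating line w = 200, b = 100, which is a convenient sanity check on the implementation.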