sgd example python
Example 1: sgd code python
import numpy as np

def gradient_descent(
    gradient, x, y, start, learn_rate=0.1, n_iter=50, tolerance=1e-06,
    dtype="float64"
):
    # Checking if the gradient is callable
    if not callable(gradient):
        raise TypeError("'gradient' must be callable")

    # Setting up the data type for the NumPy arrays
    dtype_ = np.dtype(dtype)

    # Converting x and y to NumPy arrays
    x, y = np.array(x, dtype=dtype_), np.array(y, dtype=dtype_)
    if x.shape[0] != y.shape[0]:
        raise ValueError("'x' and 'y' lengths do not match")

    # Initializing the values of the variables
    vector = np.array(start, dtype=dtype_)

    # Setting up and checking the learning rate
    learn_rate = np.array(learn_rate, dtype=dtype_)
    if np.any(learn_rate <= 0):
        raise ValueError("'learn_rate' must be greater than zero")

    # Setting up and checking the maximal number of iterations
    n_iter = int(n_iter)
    if n_iter <= 0:
        raise ValueError("'n_iter' must be greater than zero")

    # Setting up and checking the tolerance
    tolerance = np.array(tolerance, dtype=dtype_)
    if np.any(tolerance <= 0):
        raise ValueError("'tolerance' must be greater than zero")

    # Performing the gradient descent loop
    for _ in range(n_iter):
        # Recalculating the difference
        diff = -learn_rate * np.array(gradient(x, y, vector), dtype_)

        # Stopping if the absolute difference is small enough
        if np.all(np.abs(diff) <= tolerance):
            break

        # Updating the values of the variables
        vector += diff

    return vector if vector.shape else vector.item()
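The function above is only defined, never called. Below is a minimal usage sketch, assuming a small linear-regression dataset and a hypothetical ssr_gradient helper (neither appears in the snippet above) that returns the gradient of the sum of squared residuals with respect to the intercept and slope.

# Usage sketch (assumed data and gradient helper, for illustration only)
def ssr_gradient(x, y, b):
    res = b[0] + b[1] * x - y            # residuals for the current coefficients
    return res.mean(), (res * x).mean()  # partial derivatives w.r.t. b[0] and b[1]

x = [5, 15, 25, 35, 45, 55]
y = [5, 20, 14, 32, 22, 38]

# With a small learning rate and enough iterations, the returned array of
# (intercept, slope) should approach the ordinary least-squares fit.
coefficients = gradient_descent(
    ssr_gradient, x, y, start=[0.5, 0.5], learn_rate=0.0008, n_iter=100_000
)
print(coefficients)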
Example 2: sgd in python
>>> import tensorflow as tf
>>>
>>> sgd = tf.keras.optimizers.SGD(learning_rate=0.1, momentum=0.9)
>>> var = tf.Variable(2.5)
>>> cost = lambda: 2 + var ** 2
>>>
>>> for _ in range(100):
...     sgd.minimize(cost, var_list=[var])
>>>
>>> var.numpy()
-0.007128528
>>> cost().numpy()
2.0000508
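The small negative value of var in the output above is consistent with the momentum=0.9 term carrying updates slightly past the minimum at 0. For comparison, the same problem can be minimized with an explicit gradient step instead of a Keras optimizer. The sketch below assumes plain SGD without momentum and uses tf.GradientTape to compute the derivative of 2 + var ** 2; it is illustrative and not part of the original snippet.
>>> var = tf.Variable(2.5)
>>> learning_rate = 0.1
>>>
>>> for _ in range(100):
...     with tf.GradientTape() as tape:       # record operations on var
...         cost = 2 + var ** 2
...     grad = tape.gradient(cost, var)       # d(cost)/d(var) = 2 * var
...     var.assign_sub(learning_rate * grad)  # var <- var - learning_rate * grad
>>>
>>> var.numpy()  # should now be very close to 0.0, the minimizer of 2 + var ** 2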