SGD code Python code example

Example: SGD code Python. The function below implements plain (batch) gradient descent with NumPy; a stochastic minibatch variant is sketched further down.

import numpy as np

def gradient_descent(
    gradient, x, y, start, learn_rate=0.1, n_iter=50, tolerance=1e-06,
    dtype="float64"
):
    # Checking if the gradient is callable
    if not callable(gradient):
        raise TypeError("'gradient' must be callable")

    # Setting up the data type for NumPy arrays
    dtype_ = np.dtype(dtype)

    # Converting x and y to NumPy arrays
    x, y = np.array(x, dtype=dtype_), np.array(y, dtype=dtype_)
    if x.shape[0] != y.shape[0]:
        raise ValueError("'x' and 'y' lengths do not match")

    # Initializing the values of the variables
    vector = np.array(start, dtype=dtype_)

    # Setting up and checking the learning rate
    learn_rate = np.array(learn_rate, dtype=dtype_)
    if np.any(learn_rate <= 0):
        raise ValueError("'learn_rate' must be greater than zero")

    # Setting up and checking the maximal number of iterations
    n_iter = int(n_iter)
    if n_iter <= 0:
        raise ValueError("'n_iter' must be greater than zero")

    # Setting up and checking the tolerance
    tolerance = np.array(tolerance, dtype=dtype_)
    if np.any(tolerance <= 0):
        raise ValueError("'tolerance' must be greater than zero")

    # Performing the gradient descent loop
    for _ in range(n_iter):
        # Recalculating the difference
        diff = -learn_rate * np.array(gradient(x, y, vector), dtype=dtype_)

        # Checking if the absolute difference is small enough
        if np.all(np.abs(diff) <= tolerance):
            break

        # Updating the values of the variables
        vector += diff

    return vector if vector.shape else vector.item()
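For context, here is a minimal usage sketch. It assumes a simple linear regression y ≈ b[0] + b[1] * x and uses the mean of the squared-residual partial derivatives as the gradient; the helper name ssr_gradient and the sample data are illustrative choices, not part of the snippet above.

import numpy as np

# Illustrative gradient of the sum of squared residuals for a linear model
# y ≈ b[0] + b[1] * x (scaled by the number of observations). The name
# ssr_gradient and the data below are assumptions for this sketch.
def ssr_gradient(x, y, b):
    res = b[0] + b[1] * x - y            # residuals for the current coefficients
    return res.mean(), (res * x).mean()  # partial derivatives w.r.t. b[0], b[1]

x = np.array([5, 15, 25, 35, 45, 55])
y = np.array([5, 20, 14, 32, 22, 38])

b = gradient_descent(
    ssr_gradient, x, y, start=[0.5, 0.5], learn_rate=0.0008, n_iter=100_000
)
print(b)  # approximate least-squares coefficients [intercept, slope]

The tolerance check stops the loop early once every update becomes smaller than tolerance, so n_iter only caps the worst case.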
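The title mentions SGD, but gradient_descent above computes the gradient on the full dataset in every iteration. Below is a minimal sketch of a stochastic minibatch variant, assuming the same gradient(x, y, vector) call signature; the function name sgd, the batch_size and random_state parameters, and the shuffling scheme are illustrative choices, not part of the snippet above.

import numpy as np

def sgd(gradient, x, y, start, learn_rate=0.1, batch_size=1, n_iter=50,
        tolerance=1e-06, random_state=None):
    # Stack x and y column-wise so that rows can be shuffled together
    x, y = np.array(x, dtype="float64"), np.array(y, dtype="float64")
    xy = np.c_[x.reshape(x.shape[0], -1), y.reshape(-1, 1)]

    rng = np.random.default_rng(seed=random_state)
    vector = np.array(start, dtype="float64")
    n_obs = xy.shape[0]

    for _ in range(int(n_iter)):
        rng.shuffle(xy)  # new random order of observations each epoch

        # One update per minibatch instead of one per full pass over the data
        for start_idx in range(0, n_obs, int(batch_size)):
            stop = start_idx + int(batch_size)
            x_batch, y_batch = xy[start_idx:stop, :-1], xy[start_idx:stop, -1:]

            grad = np.array(gradient(x_batch, y_batch, vector), dtype="float64")
            diff = -learn_rate * grad
            if np.all(np.abs(diff) <= tolerance):
                break
            vector += diff

    return vector if vector.shape else vector.item()

With batch_size equal to the number of observations this behaves like the batch routine above; with batch_size=1 it performs classic stochastic gradient descent, one observation per update.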