diff --git a/machine_learning/neural_network/optimizers/__init__.py b/machine_learning/neural_network/optimizers/__init__.py
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/machine_learning/neural_network/optimizers/sgd.py b/machine_learning/neural_network/optimizers/sgd.py
new file mode 100644
--- /dev/null
+++ b/machine_learning/neural_network/optimizers/sgd.py
@@ -0,0 +1,30 @@
+"""
+Stochastic Gradient Descent (SGD) optimizer.
+"""
+
+from __future__ import annotations
+
+
+def sgd_update(weights: list[float], grads: list[float], lr: float) -> list[float]:
+    """
+    Return weights after one gradient-descent step: ``w <- w - lr * g``.
+
+    Args:
+        weights (list[float]): Current weights
+        grads (list[float]): Gradients, one per weight
+        lr (float): Learning rate (step size)
+
+    Returns:
+        list[float]: Updated weights; the inputs are not mutated.
+
+    Raises:
+        ValueError: If ``weights`` and ``grads`` differ in length.
+
+    Example:
+        >>> sgd_update([0.5, -0.2], [0.1, -0.1], 0.01)
+        [0.499, -0.199]
+    """
+    # zip() would silently truncate on a length mismatch, hiding a caller bug.
+    if len(weights) != len(grads):
+        raise ValueError("weights and grads must have the same length")
+    return [w - lr * g for w, g in zip(weights, grads)]
diff --git a/machine_learning/neural_network/optimizers/test_sgd.py b/machine_learning/neural_network/optimizers/test_sgd.py
new file mode 100644
--- /dev/null
+++ b/machine_learning/neural_network/optimizers/test_sgd.py
@@ -0,0 +1,25 @@
+"""Tests for the SGD optimizer."""
+
+import math
+
+# Import via the full package path so the test resolves from the repository
+# root, not only when run from inside this directory.
+from machine_learning.neural_network.optimizers.sgd import sgd_update
+
+
+def test_sgd() -> None:
+    """sgd_update takes one step and leaves its inputs untouched."""
+    weights = [0.5, -0.2]
+    grads = [0.1, -0.1]
+    updated = sgd_update(weights, grads, lr=0.01)
+    expected = [0.499, -0.199]
+    # Compare with a tolerance: exact float equality is brittle.
+    assert all(
+        math.isclose(u, e) for u, e in zip(updated, expected)
+    ), f"Expected {expected}, got {updated}"
+    assert weights == [0.5, -0.2]
+
+
+if __name__ == "__main__":
+    test_sgd()
+    print("SGD test passed!")
diff --git a/test_sgd.py b/test_sgd.py
new file mode 100644
--- /dev/null
+++ b/test_sgd.py
@@ -0,0 +1,24 @@
+"""Repository-level smoke test for the SGD optimizer."""
+
+import math
+
+# The original import omitted the ``machine_learning`` package prefix and
+# could not resolve from the repository root.
+from machine_learning.neural_network.optimizers.sgd import sgd_update
+
+
+def test_sgd() -> None:
+    """sgd_update takes one step and leaves its inputs untouched."""
+    weights = [0.5, -0.2]
+    grads = [0.1, -0.1]
+    updated = sgd_update(weights, grads, lr=0.01)
+    expected = [0.499, -0.199]
+    assert all(
+        math.isclose(u, e) for u, e in zip(updated, expected)
+    ), f"Expected {expected}, got {updated}"
+    assert weights == [0.5, -0.2]
+
+
+if __name__ == "__main__":
+    test_sgd()
+    print("SGD test passed!")