From 089c51ebd6c590bebf8d691a9e2ba04216c273e2 Mon Sep 17 00:00:00 2001
From: Madhavan Singh Parihar
Date: Wed, 22 Oct 2025 12:27:34 +0530
Subject: [PATCH 1/2] feat: add SGD optimizer with unit test and doctest

---
 .../neural_network/optimizers/__init__.py |  0
 .../neural_network/optimizers/sgd.py       | 23 +++++++++++++++++++
 .../neural_network/optimizers/test_sgd.py  | 11 +++++++++
 test_sgd.py                                | 11 +++++++++
 4 files changed, 45 insertions(+)
 create mode 100644 machine_learning/neural_network/optimizers/__init__.py
 create mode 100644 machine_learning/neural_network/optimizers/sgd.py
 create mode 100644 machine_learning/neural_network/optimizers/test_sgd.py
 create mode 100644 test_sgd.py

diff --git a/machine_learning/neural_network/optimizers/__init__.py b/machine_learning/neural_network/optimizers/__init__.py
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/machine_learning/neural_network/optimizers/sgd.py b/machine_learning/neural_network/optimizers/sgd.py
new file mode 100644
index 000000000000..0fb59a70c83e
--- /dev/null
+++ b/machine_learning/neural_network/optimizers/sgd.py
@@ -0,0 +1,23 @@
+"""
+Stochastic Gradient Descent (SGD) optimizer.
+"""
+
+from typing import List
+
+def sgd_update(weights: List[float], grads: List[float], lr: float) -> List[float]:
+    """
+    Update weights using SGD.
+
+    Args:
+        weights (List[float]): Current weights
+        grads (List[float]): Gradients
+        lr (float): Learning rate
+
+    Returns:
+        List[float]: Updated weights
+
+    Example:
+        >>> sgd_update([0.5, -0.2], [0.1, -0.1], 0.01)
+        [0.499, -0.199]
+    """
+    return [w - lr * g for w, g in zip(weights, grads)]
diff --git a/machine_learning/neural_network/optimizers/test_sgd.py b/machine_learning/neural_network/optimizers/test_sgd.py
new file mode 100644
index 000000000000..f3daf1c4b16b
--- /dev/null
+++ b/machine_learning/neural_network/optimizers/test_sgd.py
@@ -0,0 +1,11 @@
+from sgd import sgd_update
+
+def test_sgd():
+    weights = [0.5, -0.2]
+    grads = [0.1, -0.1]
+    updated = sgd_update(weights, grads, lr=0.01)
+    assert updated == [0.499, -0.199], f"Expected [0.499, -0.199], got {updated}"
+
+if __name__ == "__main__":
+    test_sgd()
+    print("SGD test passed!")
diff --git a/test_sgd.py b/test_sgd.py
new file mode 100644
index 000000000000..1d489b4baf95
--- /dev/null
+++ b/test_sgd.py
@@ -0,0 +1,11 @@
+from neural_network.optimizers.sgd import sgd_update
+
+def test_sgd():
+    weights = [0.5, -0.2]
+    grads = [0.1, -0.1]
+    updated = sgd_update(weights, grads, lr=0.01)
+    assert updated == [0.499, -0.199], f"Expected [0.499, -0.199], got {updated}"
+
+if __name__ == "__main__":
+    test_sgd()
+    print("SGD test passed!")

From dc72e8772d3ea1181132cfd65b27dd32c3e09f76 Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Wed, 22 Oct 2025 07:02:45 +0000
Subject: [PATCH 2/2] [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
---
 machine_learning/neural_network/optimizers/sgd.py      | 1 +
 machine_learning/neural_network/optimizers/test_sgd.py | 2 ++
 test_sgd.py                                            | 2 ++
 3 files changed, 5 insertions(+)

diff --git a/machine_learning/neural_network/optimizers/sgd.py b/machine_learning/neural_network/optimizers/sgd.py
index 0fb59a70c83e..093c0f5ba6c1 100644
--- a/machine_learning/neural_network/optimizers/sgd.py
+++ b/machine_learning/neural_network/optimizers/sgd.py
@@ -4,6 +4,7 @@
 
 from typing import List
 
+
 def sgd_update(weights: List[float], grads: List[float], lr: float) -> List[float]:
     """
     Update weights using SGD.
diff --git a/machine_learning/neural_network/optimizers/test_sgd.py b/machine_learning/neural_network/optimizers/test_sgd.py
index f3daf1c4b16b..53aa5a2f93ab 100644
--- a/machine_learning/neural_network/optimizers/test_sgd.py
+++ b/machine_learning/neural_network/optimizers/test_sgd.py
@@ -1,11 +1,13 @@
 from sgd import sgd_update
 
+
 def test_sgd():
     weights = [0.5, -0.2]
     grads = [0.1, -0.1]
     updated = sgd_update(weights, grads, lr=0.01)
     assert updated == [0.499, -0.199], f"Expected [0.499, -0.199], got {updated}"
 
+
 if __name__ == "__main__":
     test_sgd()
     print("SGD test passed!")
diff --git a/test_sgd.py b/test_sgd.py
index 1d489b4baf95..5878d17e7765 100644
--- a/test_sgd.py
+++ b/test_sgd.py
@@ -1,11 +1,13 @@
 from neural_network.optimizers.sgd import sgd_update
 
+
 def test_sgd():
     weights = [0.5, -0.2]
     grads = [0.1, -0.1]
     updated = sgd_update(weights, grads, lr=0.01)
     assert updated == [0.499, -0.199], f"Expected [0.499, -0.199], got {updated}"
 
+
 if __name__ == "__main__":
     test_sgd()
     print("SGD test passed!")