2024 #16

Open

wants to merge 4 commits into master

40 changes: 11 additions & 29 deletions .pre-commit-config.yaml
@@ -28,35 +28,17 @@ repos:
       - id: requirements-txt-fixer
       - id: trailing-whitespace
 
-  - repo: https://github.com/timothycrosley/isort
-    rev: 5.10.1
+  - repo: https://github.com/astral-sh/ruff-pre-commit
+    # Ruff version.
+    rev: v0.6.1
     hooks:
-      - id: isort
+      # Run the linter.
+      - id: ruff
+        args: [ --fix ]
+      # Run the formatter.
+      - id: ruff-format
 
-  - repo: https://github.com/pre-commit/mirrors-mypy
-    rev: v0.971
+  - repo: https://github.com/RobertCraigie/pyright-python
+    rev: v1.1.376
     hooks:
-      - id: mypy
-        exclude: ^(docs/)|(project/)|(assignments/)
-
-
-  # Black, the code formatter, natively supports pre-commit
-  - repo: https://github.com/psf/black
-    rev: 22.6.0
-    hooks:
-      - id: black
-
-  # Flake8 also supports pre-commit natively (same author)
-  - repo: https://github.com/PyCQA/flake8
-    rev: 5.0.4
-    hooks:
-      - id: flake8
-        additional_dependencies:
-          - pep8-naming
-        exclude: ^(docs/)|(assignments/)
-
-  # Doc linters
-  - repo: https://github.com/terrencepreilly/darglint
-    rev: v1.8.1
-    hooks:
-      - id: darglint
+      - id: pyright
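
If this configuration is adopted, the new hooks can be exercised locally with `pre-commit run --all-files` (assuming pre-commit itself is installed), which runs every configured hook — the basic pre-commit-hooks plus ruff, ruff-format, and pyright — against the whole repository.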
2 changes: 1 addition & 1 deletion README.md
@@ -14,4 +14,4 @@ python sync_previous_module.py previous-module-dir current-module-dir
 
 The files that will be synced are:
 
-minitorch/operators.py minitorch/module.py tests/test_module.py tests/test_operators.py project/run_manual.py
+minitorch/operators.py minitorch/module.py tests/test_module.py tests/test_operators.py project/run_manual.py
8 changes: 4 additions & 4 deletions minitorch/__init__.py
@@ -1,8 +1,8 @@
+from .testing import MathTest, MathTestVariable  # type: ignore # noqa: F401,F403
 from .autodiff import *  # noqa: F401,F403
+from .datasets import *  # noqa: F401,F403
+from .module import *  # noqa: F401,F403
+from .optim import *  # noqa: F401,F403
 from .scalar import *  # noqa: F401,F403
 from .scalar_functions import *  # noqa: F401,F403
-from .optim import *  # noqa: F401,F403
-from .datasets import *  # noqa: F401,F403
 from .testing import *  # noqa: F401,F403
-from .testing import MathTest, MathTestVariable  # type: ignore # noqa: F401,F403
-from .module import *  # noqa: F401,F403
46 changes: 22 additions & 24 deletions minitorch/autodiff.py
@@ -1,26 +1,29 @@
 from __future__ import annotations
 
 from dataclasses import dataclass
-from typing import Any, Iterable, List, Tuple
+from typing import Any, Iterable, List, Tuple, Protocol
 
-from typing_extensions import Protocol
 
 # ## Task 1.1
 # Central Difference calculation
 
 
 def central_difference(f: Any, *vals: Any, arg: int = 0, epsilon: float = 1e-6) -> Any:
-    r"""
-    Computes an approximation to the derivative of `f` with respect to one arg.
+    r"""Computes an approximation to the derivative of `f` with respect to one arg.
 
     See :doc:`derivative` or https://en.wikipedia.org/wiki/Finite_difference for more details.
 
     Args:
+    ----
         f : arbitrary function from n-scalar args to one value
         *vals : n-float values $x_0 \ldots x_{n-1}$
         arg : the number $i$ of the arg to compute the derivative
         epsilon : a small constant
 
     Returns:
+    -------
         An approximation of $f'_i(x_0, \ldots, x_{n-1})$
+
     """
     # TODO: Implement for Task 1.1.
     raise NotImplementedError("Need to implement for Task 1.1")
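
The docstring above pins down the formula, so one possible way to fill in this Task 1.1 TODO (a sketch only, not the PR's code, reusing the imports already at the top of autodiff.py) is:

def central_difference(f: Any, *vals: Any, arg: int = 0, epsilon: float = 1e-6) -> Any:
    r"""Approximate $f'_i(x_0, \ldots, x_{n-1})$ with a symmetric difference quotient."""
    # Perturb only the arg-th value; leave the others untouched.
    upper = list(vals)
    lower = list(vals)
    upper[arg] += epsilon
    lower[arg] -= epsilon
    # (f(..., x_i + eps, ...) - f(..., x_i - eps, ...)) / (2 * eps)
    return (f(*upper) - f(*lower)) / (2.0 * epsilon)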
@@ -30,67 +33,62 @@ def central_difference(f: Any, *vals: Any, arg: int = 0, epsilon: float = 1e-6)
 
 
 class Variable(Protocol):
-    def accumulate_derivative(self, x: Any) -> None:
-        pass
+    def accumulate_derivative(self, x: Any) -> None: ...
 
     @property
-    def unique_id(self) -> int:
-        pass
+    def unique_id(self) -> int: ...
 
-    def is_leaf(self) -> bool:
-        pass
+    def is_leaf(self) -> bool: ...
 
-    def is_constant(self) -> bool:
-        pass
+    def is_constant(self) -> bool: ...
 
     @property
-    def parents(self) -> Iterable["Variable"]:
-        pass
+    def parents(self) -> Iterable["Variable"]: ...
 
-    def chain_rule(self, d_output: Any) -> Iterable[Tuple["Variable", Any]]:
-        pass
+    def chain_rule(self, d_output: Any) -> Iterable[Tuple[Variable, Any]]: ...
 
 
 def topological_sort(variable: Variable) -> Iterable[Variable]:
-    """
-    Computes the topological order of the computation graph.
+    """Computes the topological order of the computation graph.
 
     Args:
+    ----
         variable: The right-most variable
 
     Returns:
+    -------
         Non-constant Variables in topological order starting from the right.
+
     """
     # TODO: Implement for Task 1.4.
     raise NotImplementedError("Need to implement for Task 1.4")
 
 
 def backpropagate(variable: Variable, deriv: Any) -> None:
-    """
-    Runs backpropagation on the computation graph in order to
+    """Runs backpropagation on the computation graph in order to
     compute derivatives for the leave nodes.
 
     Args:
+    ----
         variable: The right-most variable
         deriv : Its derivative that we want to propagate backward to the leaves.
 
     No return. Should write to its results to the derivative values of each leaf through `accumulate_derivative`.
+
     """
     # TODO: Implement for Task 1.4.
     raise NotImplementedError("Need to implement for Task 1.4")
 
 
 @dataclass
 class Context:
-    """
-    Context class is used by `Function` to store information during the forward pass.
-    """
+    """Context class is used by `Function` to store information during the forward pass."""
 
     no_grad: bool = False
     saved_values: Tuple[Any, ...] = ()
 
     def save_for_backward(self, *values: Any) -> None:
-        "Store the given `values` if they need to be used during backpropagation."
+        """Store the given `values` if they need to be used during backpropagation."""
         if self.no_grad:
             return
         self.saved_values = values
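
For the Task 1.4 TODOs, here is a minimal sketch built only on the `Variable` protocol shown above (an illustration, not the course's reference solution). `topological_sort` can do a depth-first walk over `parents`, skipping constants and emitting the right-most variable first:

def topological_sort(variable: Variable) -> Iterable[Variable]:
    """Non-constant Variables in topological order, right-most variable first."""
    order: List[Variable] = []
    seen = set()

    def visit(var: Variable) -> None:
        if var.unique_id in seen or var.is_constant():
            return
        if not var.is_leaf():
            for parent in var.parents:
                visit(parent)
        seen.add(var.unique_id)
        order.insert(0, var)  # prepend, so nodes closer to the output end up in front

    visit(variable)
    return order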
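`backpropagate` can then walk that order, keeping a table of partial derivatives keyed by `unique_id`, calling `chain_rule` on internal nodes and `accumulate_derivative` on leaves (same caveat: a sketch under the assumptions above, not the assignment's solution):

def backpropagate(variable: Variable, deriv: Any) -> None:
    """Propagate `deriv` from `variable` back to the leaves."""
    derivatives = {variable.unique_id: deriv}
    for var in topological_sort(variable):
        d_output = derivatives.get(var.unique_id, 0.0)
        if var.is_leaf():
            var.accumulate_derivative(d_output)
            continue
        for parent, d_parent in var.chain_rule(d_output):
            if parent.is_constant():
                continue
            derivatives[parent.unique_id] = derivatives.get(parent.unique_id, 0.0) + d_parent
    # Nothing is returned; leaves receive their totals through accumulate_derivative.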
33 changes: 17 additions & 16 deletions minitorch/module.py
@@ -4,11 +4,11 @@
 
 
 class Module:
-    """
-    Modules form a tree that store parameters and other
+    """Modules form a tree that store parameters and other
     submodules. They make up the basis of neural network stacks.
 
-    Attributes:
+    Attributes
+    ----------
        _modules : Storage of the child modules
        _parameters : Storage of the module's parameters
        training : Whether the module is in training mode or evaluation mode
@@ -25,42 +25,44 @@ def __init__(self) -> None:
         self.training = True
 
     def modules(self) -> Sequence[Module]:
-        "Return the direct child modules of this module."
+        """Return the direct child modules of this module."""
         m: Dict[str, Module] = self.__dict__["_modules"]
         return list(m.values())
 
     def train(self) -> None:
-        "Set the mode of this module and all descendent modules to `train`."
+        """Set the mode of this module and all descendent modules to `train`."""
         raise NotImplementedError("Need to include this file from past assignment.")
 
     def eval(self) -> None:
-        "Set the mode of this module and all descendent modules to `eval`."
+        """Set the mode of this module and all descendent modules to `eval`."""
         raise NotImplementedError("Need to include this file from past assignment.")
 
     def named_parameters(self) -> Sequence[Tuple[str, Parameter]]:
-        """
-        Collect all the parameters of this module and its descendents.
+        """Collect all the parameters of this module and its descendents.
 
-
-        Returns:
+        Returns
+        -------
            The name and `Parameter` of each ancestor parameter.
+
         """
         raise NotImplementedError("Need to include this file from past assignment.")
 
     def parameters(self) -> Sequence[Parameter]:
-        "Enumerate over all the parameters of this module and its descendents."
+        """Enumerate over all the parameters of this module and its descendents."""
         raise NotImplementedError("Need to include this file from past assignment.")
 
     def add_parameter(self, k: str, v: Any) -> Parameter:
-        """
-        Manually add a parameter. Useful helper for scalar parameters.
+        """Manually add a parameter. Useful helper for scalar parameters.
 
         Args:
+        ----
            k: Local name of the parameter.
            v: Value for the parameter.
 
         Returns:
+        -------
            Newly created parameter.
+
         """
         val = Parameter(v, k)
         self.__dict__["_parameters"][k] = val
@@ -114,8 +116,7 @@ def _addindent(s_: str, numSpaces: int) -> str:
 
 
 class Parameter:
-    """
-    A Parameter is a special container stored in a `Module`.
+    """A Parameter is a special container stored in a `Module`.
 
     It is designed to hold a `Variable`, but we allow it to hold
     any value for testing.
@@ -130,7 +131,7 @@ def __init__(self, x: Any, name: Optional[str] = None) -> None:
                 self.value.name = self.name
 
     def update(self, x: Any) -> None:
-        "Update the parameter value."
+        """Update the parameter value."""
         self.value = x
         if hasattr(x, "requires_grad_"):
             self.value.requires_grad_(True)
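
As a quick illustration of how the Module/Parameter tree described in these docstrings is meant to be used once the stubs from the earlier assignment are filled in (the class and attribute names below are invented for the example, and it assumes the usual minitorch behaviour where assigning a Module or Parameter attribute registers it in _modules or _parameters):

from minitorch.module import Module, Parameter


class SubModule(Module):
    def __init__(self) -> None:
        super().__init__()
        self.weight = Parameter(0.5, "weight")  # registered in _parameters


class Network(Module):
    def __init__(self) -> None:
        super().__init__()
        self.layer = SubModule()                     # registered in _modules
        self.bias = self.add_parameter("bias", 0.1)  # manually added scalar parameter


net = Network()
net.train()  # sets `training` on net and, recursively, on net.layer
for name, parameter in net.named_parameters():
    print(name, parameter)  # expected names: "layer.weight" and "bias"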