Remove commented torch hack
frostedoyster committed Nov 13, 2024
1 parent 853ccf0 commit 3be99cc
Showing 1 changed file with 0 additions and 30 deletions.
30 changes: 0 additions & 30 deletions src/metatrain/experimental/nanopet/__init__.py
@@ -11,33 +11,3 @@
 __maintainers__ = [
     ("Filippo Bigi <[email protected]>", "@frostedoyster"),
 ]
-
-
-# This is fixing a small bug in the attention implementation
-# in torch that prevents it from being torchscriptable.
-
-# import os
-
-# import torch
-
-
-# file = os.path.join(os.path.dirname(torch.__file__), "nn", "modules", "activation.py")
-
-# with open(file, "r") as f:
-#     lines = f.readlines()
-# for i, line in enumerate(lines):
-#     if (
-#         "elif self.in_proj_bias is not None and query.dtype != self.in_proj_bias.dtype:"  # noqa: E501
-#         in line
-#     ):
-#         lines[i] = line.replace(
-#             "elif self.in_proj_bias is not None and query.dtype != self.in_proj_bias.dtype:",  # noqa: E501
-#             "elif self.in_proj_bias is not None:\n"
-#             "            if query.dtype != self.in_proj_bias.dtype:",
-#         )
-#         lines[i + 1] = (
-#             '                why_not_fast_path = f"dtypes of query ({query.dtype}) and self.in_proj_bias ({self.in_proj_bias.dtype}) do not match"\n'  # noqa: E501
-#         )
-
-# with open(file, "w") as f:
-#     f.writelines(lines)
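
Note: reconstructed from the replacement strings in the removed hack, the patch it applied to torch's nn/modules/activation.py looked roughly like the sketch below. This is illustrative only; the exact indentation and surrounding code in the real torch source may differ.

    # Before the hack, the line pair that reportedly broke TorchScript
    # compilation read roughly:
    #
    #     elif self.in_proj_bias is not None and query.dtype != self.in_proj_bias.dtype:
    #         why_not_fast_path = f"dtypes of query ({query.dtype}) and self.in_proj_bias ({self.in_proj_bias.dtype}) do not match"
    #
    # After the hack's in-place string replacement, the compound `and` was
    # split into a nested `if`, keeping the same why_not_fast_path message:
    #
    #     elif self.in_proj_bias is not None:
    #         if query.dtype != self.in_proj_bias.dtype:
    #             why_not_fast_path = f"dtypes of query ({query.dtype}) and self.in_proj_bias ({self.in_proj_bias.dtype}) do not match"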
