[FIX | FMT] RTD build, apply latest black and isort, remove coveralls (#323)

* [DOC] Fix RTD build

* [FMT] Apply `black==24.2.0`

* [DEL] Remove coveralls upload
f-dangel authored Mar 1, 2024
1 parent e9b1dd3 commit 8e3b52b
Showing 155 changed files with 221 additions and 84 deletions.
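
Nearly all of the per-file changes below are mechanical: black's 2024 stable style, applied here via `black==24.2.0`, requires exactly one blank line between a module docstring and the first statement that follows it. Most of the 221 additions are that single blank line, and deletions such as the one in backpack/extensions/curvmatprod/__init__.py collapse surplus blank lines at the same spot. A minimal sketch of the rule on a hypothetical module (not a file from this repository):

```python
"""Toy module docstring (hypothetical example, not from the repository)."""

# black <= 23.x accepted the import directly under the docstring;
# black == 24.2.0 inserts exactly one blank line above it, which is the
# one-line addition repeated across most files in this commit.
from typing import Dict


def load(path: str) -> Dict[str, str]:
    """Return a toy mapping; present only to make the example runnable."""
    return {"path": path}
```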
7 changes: 0 additions & 7 deletions .github/workflows/test.yaml
@@ -46,10 +46,3 @@ jobs:
       if: contains('refs/heads/master refs/heads/development refs/heads/release', github.ref) != 1
       run: |
         make test-light
-    - name: Test coveralls - python ${{ matrix.python-version }}
-      run: coveralls --service=github
-      env:
-        GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        flag-name: run-${{ matrix.python-version }}
-        parallel: true
14 changes: 9 additions & 5 deletions .readthedocs.yml
@@ -6,10 +6,14 @@ version: 2
 sphinx:
   configuration: docs_src/rtd/conf.py
 
+build:
+  os: ubuntu-22.04
+  tools:
+    python: "3.8"
+
 python:
-  version: "3.8"
   install:
-  - method: pip
-    path: .
-    extra_requirements:
-      - docs
+    - method: pip
+      path: .
+      extra_requirements:
+        - docs
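
For reference, here is a sketch of the full .readthedocs.yml this hunk presumably produces, assuming the lines outside the hunk (the `version: 2` header and the sphinx block) are unchanged. The change migrates off the deprecated top-level `python.version` key to the `build.os` and `build.tools` schema that current Read the Docs builds require:

```yaml
# Reconstruction from the hunk above; lines outside the hunk are assumed.
version: 2

sphinx:
  configuration: docs_src/rtd/conf.py

build:
  os: ubuntu-22.04
  tools:
    python: "3.8"

python:
  install:
    - method: pip
      path: .
      extra_requirements:
        - docs
```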
1 change: 1 addition & 0 deletions backpack/__init__.py
@@ -1,4 +1,5 @@
 """BackPACK."""
+
 from inspect import isclass
 from types import TracebackType
 from typing import Callable, Optional, Tuple, Type, Union
1 change: 1 addition & 0 deletions backpack/context.py
@@ -1,4 +1,5 @@
 """Context class for BackPACK."""
+
 from typing import Callable, Iterable, List, Tuple
 
 from torch.nn import Module
1 change: 1 addition & 0 deletions backpack/core/derivatives/adaptive_avg_pool_nd.py
@@ -1,4 +1,5 @@
 """Implements the derivatives for AdaptiveAvgPool."""
+
 from typing import List, Tuple, Union
 from warnings import warn
 
1 change: 1 addition & 0 deletions backpack/core/derivatives/avgpoolnd.py
@@ -3,6 +3,7 @@
 Average pooling can be expressed as convolution over grouped channels with a constant
 kernel.
 """
+
 from typing import Any, List, Tuple
 
 from einops import rearrange
1 change: 1 addition & 0 deletions backpack/core/derivatives/basederivatives.py
@@ -1,4 +1,5 @@
 """Base classes for more flexible Jacobians and second-order information."""
+
 import warnings
 from abc import ABC
 from typing import Callable, List, Tuple
1 change: 1 addition & 0 deletions backpack/core/derivatives/batchnorm_nd.py
@@ -1,4 +1,5 @@
 """Contains derivatives for BatchNorm."""
+
 from typing import List, Tuple, Union
 
 from torch import Size, Tensor, einsum
1 change: 1 addition & 0 deletions backpack/core/derivatives/conv_transposend.py
@@ -1,4 +1,5 @@
 """Partial derivatives for ``torch.nn.ConvTranspose{1,2,3}d``."""
+
 from typing import List, Tuple, Union
 
 from einops import rearrange
1 change: 1 addition & 0 deletions backpack/core/derivatives/crossentropyloss.py
@@ -1,4 +1,5 @@
 """Partial derivatives for cross-entropy loss."""
+
 from math import sqrt
 from typing import Callable, Dict, List, Tuple
 
1 change: 1 addition & 0 deletions backpack/core/derivatives/dropout.py
@@ -1,4 +1,5 @@
 """Partial derivatives for the dropout layer."""
+
 from typing import List, Tuple
 
 from torch import Tensor, eq, ones_like
1 change: 1 addition & 0 deletions backpack/core/derivatives/elu.py
@@ -1,4 +1,5 @@
 """Partial derivatives for the ELU activation function."""
+
 from typing import List, Tuple
 
 from torch import Tensor, exp, le, ones_like, zeros_like
1 change: 1 addition & 0 deletions backpack/core/derivatives/embedding.py
@@ -1,4 +1,5 @@
 """Derivatives for Embedding."""
+
 from typing import List, Tuple
 
 from torch import Tensor, einsum, zeros
1 change: 1 addition & 0 deletions backpack/core/derivatives/flatten.py
@@ -1,4 +1,5 @@
 """Partial derivatives of the flatten layer."""
+
 from typing import List, Tuple
 
 from torch import Tensor
1 change: 1 addition & 0 deletions backpack/core/derivatives/leakyrelu.py
@@ -1,4 +1,5 @@
 """Partial derivatives for the leaky ReLU layer."""
+
 from typing import List, Tuple
 
 from torch import Tensor, gt
1 change: 1 addition & 0 deletions backpack/core/derivatives/linear.py
@@ -1,4 +1,5 @@
 """Contains partial derivatives for the ``torch.nn.Linear`` layer."""
+
 from typing import List, Tuple
 
 from torch import Size, Tensor, einsum
1 change: 1 addition & 0 deletions backpack/core/derivatives/logsigmoid.py
@@ -1,4 +1,5 @@
 """Contains partial derivatives for the ``torch.nn.LogSigmoid`` layer."""
+
 from typing import List, Tuple
 
 from torch import Tensor, exp
1 change: 1 addition & 0 deletions backpack/core/derivatives/lstm.py
@@ -1,4 +1,5 @@
 """Partial derivatives for nn.LSTM."""
+
 from typing import List, Tuple
 
 from torch import Tensor, cat, einsum, sigmoid, tanh, zeros
1 change: 1 addition & 0 deletions backpack/core/derivatives/nll_base.py
@@ -1,4 +1,5 @@
 """Partial derivative bases for NLL losses."""
+
 from math import sqrt
 from typing import List, Tuple
 
1 change: 1 addition & 0 deletions backpack/core/derivatives/permute.py
@@ -1,4 +1,5 @@
 """Module containing derivatives of Permute."""
+
 from typing import List, Tuple
 
 from torch import Tensor, argsort
1 change: 1 addition & 0 deletions backpack/core/derivatives/relu.py
@@ -1,4 +1,5 @@
 """Partial derivatives for the ReLU activation function."""
+
 from typing import List, Tuple
 
 from torch import Tensor, gt
1 change: 1 addition & 0 deletions backpack/core/derivatives/rnn.py
@@ -1,4 +1,5 @@
 """Partial derivatives for the torch.nn.RNN layer."""
+
 from typing import List, Tuple
 
 from torch import Tensor, cat, einsum, zeros
1 change: 1 addition & 0 deletions backpack/core/derivatives/scale_module.py
@@ -1,4 +1,5 @@
 """Derivatives of ScaleModule (implies Identity)."""
+
 from typing import List, Tuple, Union
 
 from torch import Tensor
1 change: 1 addition & 0 deletions backpack/core/derivatives/selu.py
@@ -1,4 +1,5 @@
 """Partial derivatives for the SELU activation function."""
+
 from typing import List, Tuple
 
 from torch import Tensor, exp, le, ones_like, zeros_like
1 change: 1 addition & 0 deletions backpack/core/derivatives/shape_check.py
@@ -2,6 +2,7 @@
 Helpers to check input and output sizes of Jacobian-matrix products.
 """
+
 import functools
 from typing import Any, Callable
 
1 change: 1 addition & 0 deletions backpack/core/derivatives/sigmoid.py
@@ -1,4 +1,5 @@
 """Partial derivatives for the Sigmoid activation function."""
+
 from typing import List, Tuple
 
 from torch import Tensor
1 change: 1 addition & 0 deletions backpack/core/derivatives/slicing.py
@@ -1,4 +1,5 @@
 """Contains derivatives of slicing operation."""
+
 from typing import List, Tuple
 
 from torch import Tensor, zeros
1 change: 1 addition & 0 deletions backpack/core/derivatives/sum_module.py
@@ -1,4 +1,5 @@
 """Contains derivatives for SumModule."""
+
 from typing import List, Tuple
 
 from torch import Tensor
1 change: 1 addition & 0 deletions backpack/core/derivatives/tanh.py
@@ -1,4 +1,5 @@
 """Partial derivatives for the Tanh activation function."""
+
 from typing import List, Tuple
 
 from torch import Tensor
1 change: 1 addition & 0 deletions backpack/core/derivatives/zeropad2d.py
@@ -1,4 +1,5 @@
 """Partial derivatives for the ZeroPad2d function."""
+
 from typing import List, Tuple
 
 from einops import rearrange
1 change: 1 addition & 0 deletions backpack/custom_module/branching.py
@@ -1,4 +1,5 @@
 """Emulating branching with modules."""
+
 from typing import Any, OrderedDict, Tuple, Union
 
 from torch import Tensor
1 change: 1 addition & 0 deletions backpack/custom_module/graph_utils.py
@@ -1,4 +1,5 @@
 """Transformation tools to make graph BackPACK compatible."""
+
 from copy import deepcopy
 from typing import Tuple, Union
 from warnings import warn
1 change: 1 addition & 0 deletions backpack/custom_module/permute.py
@@ -1,4 +1,5 @@
 """Module containing Permute module."""
+
 from typing import Any
 
 from torch import Tensor
1 change: 1 addition & 0 deletions backpack/custom_module/reduce_tuple.py
@@ -1,4 +1,5 @@
 """Module containing ReduceTuple module."""
+
 from typing import Union
 
 from torch import Tensor
1 change: 1 addition & 0 deletions backpack/custom_module/scale_module.py
@@ -1,4 +1,5 @@
 """Contains ScaleModule."""
+
 from torch import Tensor
 from torch.nn import Module
 
1 change: 1 addition & 0 deletions backpack/extensions/backprop_extension.py
@@ -1,4 +1,5 @@
 """Implements the backpropagation mechanism."""
+
 from __future__ import annotations
 
 import abc
1 change: 0 additions & 1 deletion backpack/extensions/curvmatprod/__init__.py
@@ -20,7 +20,6 @@
 by Felix Dangel, Stefan Harmeling, Philipp Hennig, 2020.
 """
 
-
 from .ggnmp import GGNMP
 from .hmp import HMP
 from .pchmp import PCHMP
1 change: 1 addition & 0 deletions backpack/extensions/firstorder/base.py
@@ -1,4 +1,5 @@
 """Base class for first order extensions."""
+
 from typing import Dict, List, Type
 
 from torch.nn import Module
1 change: 1 addition & 0 deletions backpack/extensions/firstorder/batch_grad/__init__.py
@@ -2,6 +2,7 @@
 It defines the module extension for each module.
 """
+
 from typing import List
 
 from torch.nn import (
1 change: 1 addition & 0 deletions backpack/extensions/firstorder/batch_grad/batch_grad_base.py
@@ -1,4 +1,5 @@
 """Calculates the batch_grad derivative."""
+
 from __future__ import annotations
 
 from typing import TYPE_CHECKING, Callable, List, Tuple
1 change: 1 addition & 0 deletions backpack/extensions/firstorder/batch_grad/batchnorm_nd.py
@@ -1,4 +1,5 @@
 """Contains grad_batch extension for BatchNorm."""
+
 from typing import Tuple, Union
 
 from torch import Tensor
1 change: 1 addition & 0 deletions backpack/extensions/firstorder/batch_grad/embedding.py
@@ -1,4 +1,5 @@
 """BatchGrad extension for Embedding."""
+
 from backpack.core.derivatives.embedding import EmbeddingDerivatives
 from backpack.extensions.firstorder.batch_grad.batch_grad_base import BatchGradBase
 
1 change: 1 addition & 0 deletions backpack/extensions/firstorder/batch_grad/rnn.py
@@ -1,4 +1,5 @@
 """Contains BatchGradRNN."""
+
 from backpack.core.derivatives.lstm import LSTMDerivatives
 from backpack.core.derivatives.rnn import RNNDerivatives
 from backpack.extensions.firstorder.batch_grad.batch_grad_base import BatchGradBase
1 change: 1 addition & 0 deletions backpack/extensions/firstorder/batch_l2_grad/__init__.py
@@ -3,6 +3,7 @@
 Defines the backpropagation extension.
 Within it, define the extension for each module.
 """
+
 from torch.nn import (
     LSTM,
     RNN,
1 change: 1 addition & 0 deletions backpack/extensions/firstorder/batch_l2_grad/batch_l2_base.py
@@ -1,4 +1,5 @@
 """Contains Base class for batch_l2_grad."""
+
 from __future__ import annotations
 
 from typing import TYPE_CHECKING, Callable, List, Tuple
1 change: 1 addition & 0 deletions backpack/extensions/firstorder/batch_l2_grad/batchnorm_nd.py
@@ -1,4 +1,5 @@
 """Contains batch_l2 extension for BatchNorm."""
+
 from typing import Tuple, Union
 
 from torch import Tensor
1 change: 1 addition & 0 deletions backpack/extensions/firstorder/batch_l2_grad/convnd.py
@@ -1,4 +1,5 @@
 """batch_l2 extension for Conv."""
+
 from torch import einsum
 
 from backpack.core.derivatives.conv1d import Conv1DDerivatives
1 change: 1 addition & 0 deletions backpack/extensions/firstorder/batch_l2_grad/convtransposend.py
@@ -1,4 +1,5 @@
 """batch_l2 extension for ConvTranspose."""
+
 from torch import einsum
 
 from backpack.core.derivatives.conv_transpose1d import ConvTranspose1DDerivatives
1 change: 1 addition & 0 deletions backpack/extensions/firstorder/batch_l2_grad/embedding.py
@@ -1,4 +1,5 @@
 """BatchL2 extension for Embedding."""
+
 from backpack.core.derivatives.embedding import EmbeddingDerivatives
 from backpack.extensions.firstorder.batch_l2_grad.batch_l2_base import BatchL2Base
 
1 change: 1 addition & 0 deletions backpack/extensions/firstorder/batch_l2_grad/linear.py
@@ -1,4 +1,5 @@
 """Contains batch_l2 extension for Linear."""
+
 from __future__ import annotations
 
 from typing import TYPE_CHECKING, Tuple
1 change: 1 addition & 0 deletions backpack/extensions/firstorder/batch_l2_grad/rnn.py
@@ -1,4 +1,5 @@
 """Contains BatchL2RNN."""
+
 from backpack.core.derivatives.lstm import LSTMDerivatives
 from backpack.core.derivatives.rnn import RNNDerivatives
 from backpack.extensions.firstorder.batch_l2_grad.batch_l2_base import BatchL2Base
1 change: 1 addition & 0 deletions backpack/extensions/firstorder/gradient/__init__.py
@@ -2,4 +2,5 @@
 
 It calculates the same result as torch backward().
 """
+
 # TODO: Rewrite variance to not need this extension
1 change: 1 addition & 0 deletions backpack/extensions/firstorder/gradient/base.py
@@ -1,4 +1,5 @@
 """Calculates the gradient."""
+
 from backpack.extensions.firstorder.base import FirstOrderModuleExtension
 
 
1 change: 1 addition & 0 deletions backpack/extensions/firstorder/gradient/batchnorm_nd.py
@@ -1,4 +1,5 @@
 """Gradient extension for BatchNorm."""
+
 from typing import Tuple, Union
 
 from torch import Tensor
1 change: 1 addition & 0 deletions backpack/extensions/firstorder/gradient/embedding.py
@@ -1,4 +1,5 @@
 """Gradient extension for Embedding."""
+
 from backpack.core.derivatives.embedding import EmbeddingDerivatives
 from backpack.extensions.firstorder.gradient.base import GradBaseModule
 
1 change: 1 addition & 0 deletions backpack/extensions/firstorder/gradient/rnn.py
@@ -1,4 +1,5 @@
 """Contains GradRNN."""
+
 from backpack.core.derivatives.lstm import LSTMDerivatives
 from backpack.core.derivatives.rnn import RNNDerivatives
 from backpack.extensions.firstorder.gradient.base import GradBaseModule
1 change: 1 addition & 0 deletions backpack/extensions/firstorder/sum_grad_squared/__init__.py
@@ -2,6 +2,7 @@
 
 Defines module extension for each module.
 """
+
 from torch.nn import (
     LSTM,
     RNN,
1 change: 1 addition & 0 deletions backpack/extensions/firstorder/sum_grad_squared/batchnorm_nd.py
@@ -1,4 +1,5 @@
 """SGS extension for BatchNorm."""
+
 from typing import Tuple, Union
 
 from torch import Tensor
1 change: 1 addition & 0 deletions backpack/extensions/firstorder/sum_grad_squared/embedding.py
@@ -1,4 +1,5 @@
 """SGS extension for Embedding."""
+
 from backpack.core.derivatives.embedding import EmbeddingDerivatives
 from backpack.extensions.firstorder.sum_grad_squared.sgs_base import SGSBase
 
1 change: 1 addition & 0 deletions backpack/extensions/firstorder/sum_grad_squared/rnn.py
@@ -1,4 +1,5 @@
 """Contains SGSRNN module."""
+
 from backpack.core.derivatives.lstm import LSTMDerivatives
 from backpack.core.derivatives.rnn import RNNDerivatives
 from backpack.extensions.firstorder.sum_grad_squared.sgs_base import SGSBase
1 change: 1 addition & 0 deletions backpack/extensions/firstorder/sum_grad_squared/sgs_base.py
@@ -1,4 +1,5 @@
 """Contains SGSBase, the base module for sum_grad_squared extension."""
+
 from __future__ import annotations
 
 from typing import TYPE_CHECKING, Callable, List, Tuple