Install dependencies for website build (#558)
Summary:
As reported in #557, the API reference page on our website was broken (there is nothing to link to here, since the page has already been fixed).

Here's an [example](https://github.com/pytorch/opacus/actions/runs/3992500542/jobs/6848405307) of a failed build:

```
WARNING: autodoc: failed to import module 'accountants.gdp' from module 'opacus'; the following exception was raised:
No module named 'numpy'
WARNING: autodoc: failed to import module 'accountants.accountant' from module 'opacus'; the following exception was raised:
No module named 'numpy'
WARNING: autodoc: failed to import module 'accountants.rdp' from module 'opacus'; the following exception was raised:
No module named 'numpy'
WARNING: autodoc: failed to import module 'accountants.utils' from module 'opacus'; the following exception was raised:
No module named 'numpy'
```

This PR updates the dependencies for the website-building action.
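Sphinx's `autodoc` imports every documented module at build time, so the runtime dependencies of `opacus` (here, `numpy`) must be importable in the CI environment; installing `opacus` itself, as the workflow changes below do, pulls them in. For comparison only, here is a minimal sketch of the alternative approach of mocking heavy imports in the Sphinx configuration (the `conf.py` location and the mock list are assumptions, not part of this PR):

```python
# Hypothetical snippet for the website's Sphinx conf.py (location assumed).
# autodoc_mock_imports makes Sphinx substitute mock modules for these packages,
# so autodoc can import opacus modules without their runtime deps installed.
autodoc_mock_imports = ["numpy"]  # extend with other missing dependencies as needed
```

Installing the real package, as this PR does, keeps the documentation build closer to what users actually import and avoids maintaining a mock list.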

Pull Request resolved: #558

Reviewed By: karthikprasad

Differential Revision: D42997411

Pulled By: ffuuugor

fbshipit-source-id: a2b925bf66fdca9e48c4271071615f2a1c0e0f84
Igor Shilov authored and facebook-github-bot committed Feb 7, 2023
1 parent d888fd0 commit affcf22
Showing 24 changed files with 5 additions and 25 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/deploy.yml
@@ -17,7 +17,7 @@ jobs:
- name: Install dependencies
run: |
python -m pip install --upgrade pip
- python -m pip install sphinx sphinx-autodoc-typehints nbsphinx bs4
+ python -m pip install opacus sphinx sphinx-autodoc-typehints nbsphinx bs4
yarn install
- name: Build website
run: ./scripts/build_website.sh -b
2 changes: 1 addition & 1 deletion .github/workflows/test-deploy.yml
@@ -19,7 +19,7 @@ jobs:
- name: Install dependencies
run: |
python -m pip install --upgrade pip
- python -m pip install sphinx sphinx-autodoc-typehints nbsphinx bs4
+ python -m pip install opacus sphinx sphinx-autodoc-typehints nbsphinx bs4
yarn install
- name: Test build website
run: ./scripts/build_website.sh -b
1 change: 0 additions & 1 deletion benchmarks/benchmark_layer.py
@@ -90,7 +90,6 @@ def run_layer_benchmark(


def main(args) -> None:

with open(args.config_file) as config_file:
config = json.load(config_file)

1 change: 0 additions & 1 deletion benchmarks/run_benchmarks.py
@@ -83,7 +83,6 @@ def run_and_save_benchmark(


def main(args) -> None:

if args.verbose:
logger.setLevel(logging.DEBUG)

1 change: 0 additions & 1 deletion benchmarks/tests/test_utils.py
@@ -150,7 +150,6 @@ def test_get_path(config: Dict[str, Any], path: str) -> None:
def pickle_data_and_config(
config: Dict[str, Any], root: str, suffix: str
) -> Tuple[Dict[str, Any], Dict[str, Any]]:

# setup test directory and save results to pickle file
os.mkdir(root)
try:
3 changes: 1 addition & 2 deletions examples/cifar10.py
@@ -159,7 +159,6 @@ def compute_loss_stateless_model(params, sample, target):
params = list(model.parameters())

for i, (images, target) in enumerate(tqdm(train_loader)):

images = images.to(device)
target = target.to(device)

@@ -172,7 +171,7 @@ def compute_loss_stateless_model(params, sample, target):
)
per_sample_grads = [g.detach() for g in per_sample_grads]
loss = torch.mean(per_sample_losses)
- for (p, g) in zip(params, per_sample_grads):
+ for p, g in zip(params, per_sample_grads):
p.grad_sample = g
else:
loss = criterion(output, target)
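For context, the `cifar10.py` excerpt above is the functorch code path, where per-sample gradients are computed functionally and attached to `p.grad_sample` by hand. Below is a minimal, self-contained sketch of that pattern; the toy model, loss, and tensor shapes are illustrative assumptions, not taken from the example script.

```python
import torch
import torch.nn as nn
from functorch import grad_and_value, make_functional, vmap

model = nn.Linear(32, 10)          # toy stand-in for the example's network
criterion = nn.CrossEntropyLoss()

# Turn the stateful module into a pure function of (params, input).
fmodel, params = make_functional(model)

def compute_loss_stateless_model(params, sample, target):
    # Operates on a single example; vmap adds the batch dimension back.
    batch = sample.unsqueeze(0)
    targets = target.unsqueeze(0)
    predictions = fmodel(params, batch)
    return criterion(predictions, targets)

# grad_and_value returns (grads, loss); vmap maps it over the batch dimension.
compute_per_sample_grads = vmap(
    grad_and_value(compute_loss_stateless_model), in_dims=(None, 0, 0)
)

images = torch.randn(8, 32)
target = torch.randint(0, 10, (8,))
per_sample_grads, per_sample_losses = compute_per_sample_grads(params, images, target)

# Each gradient has a leading batch dimension, mirroring p.grad_sample above.
for p, g in zip(params, per_sample_grads):
    p.grad_sample = g.detach()
loss = torch.mean(per_sample_losses)
```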
2 changes: 1 addition & 1 deletion examples/vision_benchmark.py
@@ -134,7 +134,7 @@ def main(): # noqa: C901
start = time.time()
data_time = 0
data_end = time.time()
- for (images, target) in tqdm(train_loader):
+ for images, target in tqdm(train_loader):
data_time += time.time() - data_end
images = images.cuda(non_blocking=True)

2 changes: 1 addition & 1 deletion opacus/optimizers/perlayeroptimizer.py
@@ -55,7 +55,7 @@ def __init__(
)

def clip_and_accumulate(self):
- for (p, max_grad_norm) in zip(self.params, self.max_grad_norms):
+ for p, max_grad_norm in zip(self.params, self.max_grad_norms):
_check_processed_flag(p.grad_sample)

grad_sample = self._get_flat_grad_sample(p)
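The `clip_and_accumulate` excerpt above is the per-layer variant, where each parameter has its own clipping threshold in `self.max_grad_norms`. A rough sketch of that idea in isolation (the helper name, stabilizer constant, and shapes are illustrative assumptions, not Opacus internals):

```python
import torch

def clip_per_layer(grad_samples, max_grad_norms):
    """Clip each parameter's per-sample gradients to its own threshold and sum
    over the batch. grad_samples: list of tensors shaped (batch, *param_shape)."""
    accumulated = []
    for g, max_grad_norm in zip(grad_samples, max_grad_norms):
        # Per-sample L2 norm of this parameter's gradient.
        per_sample_norms = g.reshape(len(g), -1).norm(2, dim=-1)
        # Scale factor <= 1 per sample, so each clipped gradient has norm <= max_grad_norm.
        factors = (max_grad_norm / (per_sample_norms + 1e-6)).clamp(max=1.0)
        # Weighted sum over the batch dimension.
        accumulated.append(torch.einsum("i,i...->...", factors, g))
    return accumulated
```

The real optimizer also handles accumulation across virtual steps and the subsequent noise addition; the sketch only shows the clipping arithmetic.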
1 change: 0 additions & 1 deletion opacus/tests/accountants_test.py
@@ -138,7 +138,6 @@ def test_get_noise_multiplier_prv_steps(self):
)
@settings(deadline=10000)
def test_get_noise_multiplier_overshoot(self, epsilon, epochs, sample_rate, delta):

noise_multiplier = get_noise_multiplier(
target_epsilon=epsilon,
target_delta=delta,
1 change: 0 additions & 1 deletion opacus/tests/ddp_hook_check.py
@@ -298,7 +298,6 @@ def test_ddp_hook(self):
)

def test_add_remove_ddp_hooks(self):

remaining_hooks = {
"attached": None,
"detached": None,
1 change: 0 additions & 1 deletion opacus/tests/dp_layers/common.py
@@ -352,7 +352,6 @@ def _check_packed_sequence(
rtol: float,
failure_msgs: Optional[Sequence] = None,
) -> bool:

try:
padded_seq_nn, seq_lens_nn = pad_packed_sequence(nn_out, batch_first_nn)
except ValueError:
1 change: 0 additions & 1 deletion opacus/tests/grad_sample_module_test.py
@@ -53,7 +53,6 @@ def name(self):


class GradSampleModuleTest(unittest.TestCase):

CLS = GradSampleModule

def setUp(self):
1 change: 0 additions & 1 deletion opacus/tests/grad_samples/common.py
@@ -249,7 +249,6 @@ def run_test(

for grad_sample_mode in grad_sample_modes:
for loss_reduction in ["sum", "mean"]:

with self.subTest(
grad_sample_mode=grad_sample_mode, loss_reduction=loss_reduction
):
1 change: 0 additions & 1 deletion opacus/tests/grad_samples/conv1d_test.py
@@ -48,7 +48,6 @@ def test_conv1d(
dilation: int,
groups: int,
):

if padding == "same" and stride != 1:
return
out_channels = out_channels_mapper(C)
1 change: 0 additions & 1 deletion opacus/tests/grad_samples/conv3d_test.py
@@ -52,7 +52,6 @@ def test_conv3d(
dilation: int,
groups: int,
):

if padding == "same" and stride != 1:
return
out_channels = out_channels_mapper(C)
1 change: 0 additions & 1 deletion opacus/tests/grad_samples/dp_multihead_attention_test.py
@@ -66,7 +66,6 @@ def test_multihead_attention(
add_zero_attn: bool,
kv_dim: bool,
):

if kv_dim:
kdim, vdim = D, D
else:
1 change: 0 additions & 1 deletion opacus/tests/grad_samples/embedding_test.py
@@ -44,7 +44,6 @@ def test_input_across_dims(
dim: int,
batch_first: bool,
):

if dim == 1: # TODO: fix when dim is 1
size = [T]
elif dim == 2:
1 change: 0 additions & 1 deletion opacus/tests/grad_samples/group_norm_test.py
@@ -45,7 +45,6 @@ def test_3d_input_groups(
W: int,
num_groups: Union[int, str],
):

if num_groups == "C":
num_groups = C

1 change: 0 additions & 1 deletion opacus/tests/grad_samples/instance_norm1d_test.py
@@ -34,7 +34,6 @@ def test_3d_input(
C: int,
W: int,
):

x = torch.randn([N, C, W])
norm = nn.InstanceNorm1d(num_features=C, affine=True, track_running_stats=False)
self.run_test(x, norm, batch_first=True)
1 change: 0 additions & 1 deletion opacus/tests/grad_samples/instance_norm2d_test.py
@@ -36,7 +36,6 @@ def test_4d_input(
W: int,
H: int,
):

x = torch.randn([N, C, H, W])
norm = nn.InstanceNorm2d(num_features=C, affine=True, track_running_stats=False)
self.run_test(x, norm, batch_first=True)
1 change: 0 additions & 1 deletion opacus/tests/grad_samples/layer_norm_test.py
@@ -40,7 +40,6 @@ def test_input_norm(
input_dim: int,
norm_dim: int,
):

if norm_dim >= input_dim:
return
if norm_dim == 1:
1 change: 0 additions & 1 deletion opacus/tests/grad_samples/linear_test.py
@@ -42,7 +42,6 @@ def test_input_bias(
bias: bool,
batch_first: bool,
):

if input_dim == 2:
if not batch_first:
return # see https://github.com/pytorch/opacus/pull/265
1 change: 0 additions & 1 deletion opacus/tests/grad_samples/sequence_bias_test.py
@@ -36,7 +36,6 @@ def test_batch_second(
D: int,
batch_first: bool,
):

seqbias = SequenceBias(D, batch_first)
if batch_first:
x = torch.randn([N, T, D])
1 change: 0 additions & 1 deletion opacus/tests/privacy_engine_test.py
@@ -150,7 +150,6 @@ def _train_steps(
dl: DataLoader,
max_steps: Optional[int] = None,
):

steps = 0
epochs = 1 if max_steps is None else math.ceil(max_steps / len(dl))
