diff --git a/torch_harmonics/distributed/distributed_sht.py b/torch_harmonics/distributed/distributed_sht.py
index 282755a..15cf005 100644
--- a/torch_harmonics/distributed/distributed_sht.py
+++ b/torch_harmonics/distributed/distributed_sht.py
@@ -125,8 +125,8 @@ def extra_repr(self):

     def forward(self, x: torch.Tensor):

-        if x.dims() < 3:
-            raise ValueError(f"Expected tensor with at least 3 dimensions but got {x.dims()} instead")
+        if x.dim() < 3:
+            raise ValueError(f"Expected tensor with at least 3 dimensions but got {x.dim()} instead")

         # we need to ensure that we can split the channels evenly
         num_chans = x.shape[-3]
@@ -237,8 +237,8 @@ def extra_repr(self):

     def forward(self, x: torch.Tensor):

-        if x.dims() < 3:
-            raise ValueError(f"Expected tensor with at least 3 dimensions but got {x.dims()} instead")
+        if x.dim() < 3:
+            raise ValueError(f"Expected tensor with at least 3 dimensions but got {x.dim()} instead")

         # we need to ensure that we can split the channels evenly
         num_chans = x.shape[-3]
@@ -366,8 +366,8 @@ def extra_repr(self):

     def forward(self, x: torch.Tensor):

-        if x.dims() < 4:
-            raise ValueError(f"Expected tensor with at least 4 dimensions but got {x.dims()} instead")
+        if x.dim() < 4:
+            raise ValueError(f"Expected tensor with at least 4 dimensions but got {x.dim()} instead")

         # we need to ensure that we can split the channels evenly
         num_chans = x.shape[-4]
@@ -490,8 +490,8 @@ def extra_repr(self):

     def forward(self, x: torch.Tensor):

-        if x.dims() < 4:
-            raise ValueError(f"Expected tensor with at least 4 dimensions but got {x.dims()} instead")
+        if x.dim() < 4:
+            raise ValueError(f"Expected tensor with at least 4 dimensions but got {x.dim()} instead")

         # store num channels
         num_chans = x.shape[-4]
diff --git a/torch_harmonics/sht.py b/torch_harmonics/sht.py
index afeb444..18da076 100644
--- a/torch_harmonics/sht.py
+++ b/torch_harmonics/sht.py
@@ -105,8 +105,8 @@ def extra_repr(self):

     def forward(self, x: torch.Tensor):

-        if x.dims() < 2:
-            raise ValueError(f"Expected tensor with at least 2 dimensions but got {x.dims()} instead")
+        if x.dim() < 2:
+            raise ValueError(f"Expected tensor with at least 2 dimensions but got {x.dim()} instead")

         assert(x.shape[-2] == self.nlat)
         assert(x.shape[-1] == self.nlon)
@@ -275,8 +275,8 @@ def extra_repr(self):

     def forward(self, x: torch.Tensor):

-        if x.dims() < 3:
-            raise ValueError(f"Expected tensor with at least 3 dimensions but got {x.dims()} instead")
+        if x.dim() < 3:
+            raise ValueError(f"Expected tensor with at least 3 dimensions but got {x.dim()} instead")

         assert(x.shape[-2] == self.nlat)
         assert(x.shape[-1] == self.nlon)
@@ -364,8 +364,8 @@ def extra_repr(self):

     def forward(self, x: torch.Tensor):

-        if x.dims() < 3:
-            raise ValueError(f"Expected tensor with at least 3 dimensions but got {x.dims()} instead")
+        if x.dim() < 3:
+            raise ValueError(f"Expected tensor with at least 3 dimensions but got {x.dim()} instead")

         assert(x.shape[-2] == self.lmax)
         assert(x.shape[-1] == self.mmax)
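
A minimal sketch (not part of the patch) of why the rename matters: torch.Tensor exposes a dim() method and an ndim property, but no dims() method, so the old guard would fail with AttributeError before it could raise the intended ValueError. The tensor shape below is only an illustrative placeholder, not taken from the library.

    import torch

    x = torch.randn(4, 8, 16)  # hypothetical 3D input; not a shape used by torch_harmonics itself
    print(x.dim())             # prints 3; calling x.dims() would raise AttributeError instead

    # Guard mirroring the patched forward() check for the 3D case:
    if x.dim() < 3:
        raise ValueError(f"Expected tensor with at least 3 dimensions but got {x.dim()} instead")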