Commit

tests
0x00b1 committed Jul 3, 2024
1 parent 0bb4eea commit 7c5a674
Showing 1 changed file with 87 additions and 87 deletions.
tests/beignet/test__differentiate_laguerre_polynomial.py (174 changes: 87 additions & 87 deletions)
@@ -1,87 +1,87 @@
import beignet
import pytest
import torch


def test_differentiate_laguerre_polynomial():
    # A non-integer differentiation order raises a TypeError.
    with pytest.raises(TypeError):
        beignet.differentiate_laguerre_polynomial(
            torch.tensor([0]),
            order=0.5,
        )

    # A negative differentiation order raises a ValueError.
    with pytest.raises(ValueError):
        beignet.differentiate_laguerre_polynomial(
            torch.tensor([0]),
            order=-1,
        )

    # Differentiating with order=0 leaves the coefficients unchanged.
    for i in range(5):
        torch.testing.assert_close(
            beignet.trim_laguerre_polynomial_coefficients(
                beignet.differentiate_laguerre_polynomial(
                    torch.tensor([0.0] * i + [1.0]),
                    order=0,
                ),
                tol=0.000001,
            ),
            beignet.trim_laguerre_polynomial_coefficients(
                torch.tensor([0.0] * i + [1.0]),
                tol=0.000001,
            ),
        )

    # Differentiating j times undoes integrating j times.
    for i in range(5):
        for j in range(2, 5):
            torch.testing.assert_close(
                beignet.trim_laguerre_polynomial_coefficients(
                    beignet.differentiate_laguerre_polynomial(
                        beignet.integrate_laguerre_polynomial(
                            torch.tensor([0.0] * i + [1.0]),
                            order=j,
                        ),
                        order=j,
                    ),
                    tol=0.000001,
                ),
                beignet.trim_laguerre_polynomial_coefficients(
                    torch.tensor([0.0] * i + [1.0]),
                    tol=0.000001,
                ),
            )

    # The same round trip with reciprocal scale factors (2 and 0.5) is
    # also the identity, since the scales cancel.
    for i in range(5):
        for j in range(2, 5):
            torch.testing.assert_close(
                beignet.trim_laguerre_polynomial_coefficients(
                    beignet.differentiate_laguerre_polynomial(
                        beignet.integrate_laguerre_polynomial(
                            torch.tensor([0.0] * i + [1.0]),
                            order=j,
                            scale=2,
                        ),
                        order=j,
                        scale=0.5,
                    ),
                    tol=0.000001,
                ),
                beignet.trim_laguerre_polynomial_coefficients(
                    torch.tensor([0.0] * i + [1.0]),
                    tol=0.000001,
                ),
            )

    # c2d = torch.rand(3, 4)

    # torch.testing.assert_close(
    #     beignet.lagder(c2d, axis=0),
    #     torch.vstack([beignet.lagder(c) for c in c2d.T]).T,
    # )

    # torch.testing.assert_close(
    #     beignet.lagder(
    #         c2d,
    #         axis=1,
    #     ),
    #     torch.vstack([beignet.lagder(c) for c in c2d]),
    # )
# import beignet
# import pytest
# import torch
#
#
# def test_differentiate_laguerre_polynomial():
#     with pytest.raises(TypeError):
#         beignet.differentiate_laguerre_polynomial(
#             torch.tensor([0]),
#             order=0.5,
#         )
#
#     with pytest.raises(ValueError):
#         beignet.differentiate_laguerre_polynomial(
#             torch.tensor([0]),
#             order=-1,
#         )
#
#     for i in range(5):
#         torch.testing.assert_close(
#             beignet.trim_laguerre_polynomial_coefficients(
#                 beignet.differentiate_laguerre_polynomial(
#                     torch.tensor([0.0] * i + [1.0]),
#                     order=0,
#                 ),
#                 tol=0.000001,
#             ),
#             beignet.trim_laguerre_polynomial_coefficients(
#                 torch.tensor([0.0] * i + [1.0]),
#                 tol=0.000001,
#             ),
#         )
#
#     for i in range(5):
#         for j in range(2, 5):
#             torch.testing.assert_close(
#                 beignet.trim_laguerre_polynomial_coefficients(
#                     beignet.differentiate_laguerre_polynomial(
#                         beignet.integrate_laguerre_polynomial(
#                             torch.tensor([0.0] * i + [1.0]),
#                             order=j,
#                         ),
#                         order=j,
#                     ),
#                     tol=0.000001,
#                 ),
#                 beignet.trim_laguerre_polynomial_coefficients(
#                     torch.tensor([0.0] * i + [1.0]),
#                     tol=0.000001,
#                 ),
#             )
#
#     for i in range(5):
#         for j in range(2, 5):
#             torch.testing.assert_close(
#                 beignet.trim_laguerre_polynomial_coefficients(
#                     beignet.differentiate_laguerre_polynomial(
#                         beignet.integrate_laguerre_polynomial(
#                             torch.tensor([0.0] * i + [1.0]),
#                             order=j,
#                             scale=2,
#                         ),
#                         order=j,
#                         scale=0.5,
#                     ),
#                     tol=0.000001,
#                 ),
#                 beignet.trim_laguerre_polynomial_coefficients(
#                     torch.tensor([0.0] * i + [1.0]),
#                     tol=0.000001,
#                 ),
#             )
#
#     # c2d = torch.rand(3, 4)
#
#     # torch.testing.assert_close(
#     #     beignet.lagder(c2d, axis=0),
#     #     torch.vstack([beignet.lagder(c) for c in c2d.T]).T,
#     # )
#
#     # torch.testing.assert_close(
#     #     beignet.lagder(
#     #         c2d,
#     #         axis=1,
#     #     ),
#     #     torch.vstack([beignet.lagder(c) for c in c2d]),
#     # )
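
For reference, a minimal standalone sketch of the round-trip property these tests assert. It assumes only that beignet is installed and exposes the same functions, with the same signatures, used in the test above; it is an illustration, not part of the committed file.

import beignet
import torch

# Laguerre series coefficients for L_2(x): 0 * L_0 + 0 * L_1 + 1 * L_2.
coefficients = torch.tensor([0.0, 0.0, 1.0])

# Integrate three times, then differentiate three times; per the tests
# above, the trimmed result should recover the original coefficients.
roundtrip = beignet.differentiate_laguerre_polynomial(
    beignet.integrate_laguerre_polynomial(coefficients, order=3),
    order=3,
)

print(
    beignet.trim_laguerre_polynomial_coefficients(roundtrip, tol=0.000001)
)
# Expected (up to floating-point error): tensor([0., 0., 1.])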
