reduce size of SPDHG vs PDHG test data (#1898)
paskino authored Aug 23, 2024
1 parent 8050a8c commit 4f6e3cf
Showing 2 changed files with 62 additions and 113 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -17,6 +17,7 @@
- Internal refactor: Separate framework into multiple files (#1692)
- Testing:
- New unit tests for operators and functions to check for in place errors and the behaviour of `out` (#1805)
- Updates in SPDHG vs PDHG unit test to reduce test time and adjustments to parameters (#1898)
- Bug fixes:
- `ImageData` removes dimensions of size 1 from the input array. This fixes an issue where single slice reconstructions from 3D data would fail due to shape mismatches (#1885)
- Make Binner accept accelerated=False (#1887)
174 changes: 61 additions & 113 deletions Wrappers/Python/test/test_algorithms.py
@@ -1106,10 +1106,23 @@ def test_SIRT_with_TV_warm_start(self):

class TestSPDHG(unittest.TestCase):

def add_noise(self, sino, noise='gaussian', seed=10):
if noise == 'poisson':
scale = 5
noisy_data = scale * applynoise.poisson(sino/scale, seed=seed)
elif noise == 'gaussian':
noisy_data = applynoise.gaussian(sino, var=0.1, seed=seed)
else:
raise ValueError('Unsupported Noise ', noise)
return noisy_data
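
As an aside, here is a minimal NumPy-only sketch of the scaled-Poisson trick the new `add_noise` helper relies on (`scale * poisson(sino / scale)`); the function name and toy sinogram are illustrative and not part of the commit:

```python
import numpy as np

def scaled_poisson(sino, scale=5, seed=10):
    # Mimics scale * Poisson(sino / scale): the mean of `sino` is preserved,
    # but the variance becomes scale * sino, so a larger scale gives noisier data.
    rng = np.random.default_rng(seed)
    return scale * rng.poisson(sino / scale)

clean = np.full((10, 16), 100.0)   # toy sinogram with constant intensity 100
noisy = scaled_poisson(clean, scale=5)
print(noisy.mean(), noisy.var())   # roughly 100 and 500
```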

@unittest.skipUnless(has_astra, "cil-astra not available")
def test_SPDHG_vs_PDHG_implicit(self):
data = dataexample.SIMPLE_PHANTOM_2D.get(size=(128, 128))

data = dataexample.SIMPLE_PHANTOM_2D.get(size=(16, 16))
alpha = 0.05
num_subsets = 10


ig = data.geometry
ig.voxel_size_x = 0.1
ig.voxel_size_y = 0.1
@@ -1124,30 +1137,16 @@ def test_SPDHG_vs_PDHG_implicit(self):
Aop = ProjectionOperator(ig, ag, dev)

sin = Aop.direct(data)
# Create noisy data. Apply Gaussian noise
noises = ['gaussian', 'poisson']
noise = noises[1]
noisy_data = ag.allocate()
if noise == 'poisson':
np.random.seed(10)
scale = 20
eta = 0
noisy_data.fill(np.random.poisson(
scale * (eta + sin.as_array()))/scale)
elif noise == 'gaussian':
np.random.seed(10)
n1 = np.random.normal(0, 0.1, size=ag.shape)
noisy_data.fill(n1 + sin.as_array())
else:
raise ValueError('Unsupported Noise ', noise)
# Create noisy data.
noisy_data = self.add_noise(sin, noise='poisson', seed=10)



# Create BlockOperator
operator = Aop
f = KullbackLeibler(b=noisy_data)
alpha = 0.005
g = alpha * TotalVariation(50, 1e-4, lower=0, warm_start=True)
normK = operator.norm()

g = alpha * TotalVariation(10, 1e-4, lower=0, warm_start=True)

# % 'implicit' PDHG, preconditioned step-sizes
tau_tmp = 1.
sigma_tmp = 1.
@@ -1159,57 +1158,40 @@

# Setup and run the PDHG algorithm
pdhg = PDHG(f=f, g=g, operator=operator, tau=tau, sigma=sigma,
max_iteration=1000,
update_objective_interval=500)
pdhg.run(1000,verbose=0)

subsets = 10
size_of_subsets = int(len(angles)/subsets)
# take the angles and create uniform, sequential subsets
list_angles = [angles[i:i+size_of_subsets]
for i in range(0, len(angles), size_of_subsets)]
# create an acquisition geometry for each subset of angles
list_geoms = [AcquisitionGeometry.create_Parallel2D().set_angles(list_angles[i], angle_unit='radian').set_panel(detectors, 0.1)
for i in range(len(list_angles))]
# create as many projection operators as there are subsets
A = BlockOperator(*[ProjectionOperator(ig, list_geoms[i], dev)
for i in range(subsets)])
# number of subsets
# (sub2ind, ind2sub) = divide_1Darray_equally(range(len(A)), subsets)
#
# acquisition data
AD_list = []
for sub_num in range(subsets):
for i in range(0, len(angles), size_of_subsets):
arr = noisy_data.as_array()[i:i+size_of_subsets, :]
AD_list.append(AcquisitionData(
arr, geometry=list_geoms[sub_num]))

g = BlockDataContainer(*AD_list)
pdhg.run(100)
# %% 'explicit' SPDHG, scalar step-sizes
subsets = num_subsets
#partition the data
data_sub = noisy_data.partition(subsets, mode='staggered')
A = ProjectionOperator(ig, data_sub.geometry, dev)

# block function
F = BlockFunction(*[KullbackLeibler(b=g[i]) for i in range(subsets)])
G = alpha * TotalVariation(50, 1e-4, lower=0, warm_start=True)
F = BlockFunction(*[KullbackLeibler(b=data_sub[i]) for i in range(subsets)])
G = alpha * TotalVariation(2, 1e-4, lower=0, warm_start=True)

prob = [1/len(A)]*len(A)
spdhg = SPDHG(f=F, g=G, operator=A,
max_iteration=1000,
update_objective_interval=200, prob=prob)
spdhg.run(1000, verbose=0)
update_objective_interval=200, prob=prob)
spdhg.run(20 * subsets)

qm = (mae(spdhg.get_output(), pdhg.get_output()),
mse(spdhg.get_output(), pdhg.get_output()),
psnr(spdhg.get_output(), pdhg.get_output())
)
log.info("Quality measures %r", qm)

np.testing.assert_almost_equal(mae(spdhg.get_output(), pdhg.get_output()),
0.000335, decimal=3)
0.00189242, decimal=3)
np.testing.assert_almost_equal(mse(spdhg.get_output(), pdhg.get_output()),
5.51141e-06, decimal=3)
3.9063532e-05, decimal=3)
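
For orientation, the rewritten test replaces the manual slicing of `angles` into contiguous blocks with `noisy_data.partition(subsets, mode='staggered')`. Below is a plain-NumPy sketch of the index pattern I understand "staggered" to produce (round-robin assignment of angles to subsets); the helper name and numbers are illustrative, not CIL API:

```python
import numpy as np

def staggered_indices(n_angles, n_subsets):
    # Subset k takes angles k, k + n_subsets, k + 2*n_subsets, ...
    return [np.arange(k, n_angles, n_subsets) for k in range(n_subsets)]

angles = np.linspace(0, np.pi, 180, endpoint=False)
subsets = staggered_indices(len(angles), 10)
print(subsets[0][:3])   # [ 0 10 20]
print(subsets[1][:3])   # [ 1 11 21]
```

Each subset then sees projections spread over the full angular range rather than a narrow contiguous wedge, which generally gives a better-conditioned split for subset-based algorithms.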

@unittest.skipUnless(has_astra, "ccpi-astra not available")
def test_SPDHG_vs_PDHG_explicit(self):
data = dataexample.SIMPLE_PHANTOM_2D.get(size=(128, 128))
alpha = .05
num_subsets = 10

data = dataexample.SIMPLE_PHANTOM_2D.get(size=(16, 16))

ig = data.geometry
ig.voxel_size_x = 0.1
@@ -1225,61 +1207,33 @@ def test_SPDHG_vs_PDHG_explicit(self):
Aop = ProjectionOperator(ig, ag, dev)

sin = Aop.direct(data)
# Create noisy data. Apply Gaussian noise
noises = ['gaussian', 'poisson']
noise = noises[1]
if noise == 'poisson':
scale = 5
noisy_data = scale * applynoise.poisson(sin/scale, seed=10)
# np.random.seed(10)
# scale = 5
# eta = 0
# noisy_data = AcquisitionData(np.random.poisson( scale * (eta + sin.as_array()))/scale, ag)
elif noise == 'gaussian':
noisy_data = noise.gaussian(sin, var=0.1, seed=10)
else:
raise ValueError('Unsupported Noise ', noise)
# Create noisy data.
noisy_data = self.add_noise(sin, noise='poisson', seed=10)

# %% 'explicit' SPDHG, scalar step-sizes
subsets = 10
size_of_subsets = int(len(angles)/subsets)
subsets = num_subsets
# create Gradient operator
op1 = GradientOperator(ig)
# take the angles and create uniform, sequential subsets
list_angles = [angles[i:i+size_of_subsets]
for i in range(0, len(angles), size_of_subsets)]
# create an acquisition geometry for each subset of angles
list_geoms = [AcquisitionGeometry.create_Parallel2D().set_angles(list_angles[i], angle_unit='radian').set_panel(detectors, 0.1)
for i in range(len(list_angles))]
# create as many projection operators as there are subsets
A = BlockOperator(*[ProjectionOperator(ig, list_geoms[i], dev)
for i in range(subsets)] + [op1])
# number of subsets
# (sub2ind, ind2sub) = divide_1Darray_equally(range(len(A)), subsets)
#
# acquisition data
AD_list = []
for sub_num in range(subsets):
for i in range(0, len(angles), size_of_subsets):
arr = noisy_data.as_array()[i:i+size_of_subsets, :]
AD_list.append(AcquisitionData(
arr, geometry=list_geoms[sub_num]))

g = BlockDataContainer(*AD_list)
alpha = 0.5
op1 = alpha * GradientOperator(ig)

#partition the data
data_sub = noisy_data.partition(subsets, mode='staggered')
A = ProjectionOperator(ig, data_sub.geometry, dev)
operators = list(A.operators) + [op1]
K = BlockOperator(* operators )


# block function
F = BlockFunction(*[*[KullbackLeibler(b=g[i])
for i in range(subsets)] + [alpha * MixedL21Norm()]])
F = BlockFunction(*[*[KullbackLeibler(b=data_sub[i])
for i in range(subsets)] + [MixedL21Norm()]])
G = IndicatorBox(lower=0)

prob = [1/(2*subsets)]*(len(A)-1) + [1/2]
spdhg = SPDHG(f=F, g=G, operator=A,
max_iteration=1000,
update_objective_interval=200, prob=prob)
spdhg.run(1000, verbose=0)
prob = [1/(2*subsets)]*(len(K)-1) + [1/2]
spdhg = SPDHG(f=F, g=G, operator=K,
update_objective_interval=200, prob=prob)
spdhg.run(20 * subsets)

# %% 'explicit' PDHG, scalar step-sizes
op1 = GradientOperator(ig)
op1 = alpha * GradientOperator(ig)
op2 = Aop
# Create BlockOperator
operator = BlockOperator(op1, op2, shape=(2, 1))
@@ -1289,28 +1243,22 @@ def test_SPDHG_vs_PDHG_explicit(self):
sigma = 1/normK
tau = 1/normK

f1 = alpha * MixedL21Norm()
f1 = MixedL21Norm()
f = BlockFunction(f1, f2)
# Setup and run the PDHG algorithm
pdhg = PDHG(f=f, g=g, operator=operator, tau=tau, sigma=sigma)
pdhg.max_iteration = 1000
pdhg.update_objective_interval = 200
pdhg.run(1000, verbose=0)

# %% show diff between PDHG and SPDHG
# plt.imshow(spdhg.get_output().as_array() -pdhg.get_output().as_array())
# plt.colorbar()
# plt.show()
pdhg.run(100)

qm = (mae(spdhg.get_output(), pdhg.get_output()),
mse(spdhg.get_output(), pdhg.get_output()),
psnr(spdhg.get_output(), pdhg.get_output())
)
log.info("Quality measures: %r", qm)
np.testing.assert_almost_equal(mae(spdhg.get_output(), pdhg.get_output()),
0.00150, decimal=3)
0.0031962995, decimal=3)
np.testing.assert_almost_equal(mse(spdhg.get_output(), pdhg.get_output()),
1.68590e-05, decimal=3)
4.242368e-05, decimal=3)
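
On the sampling probabilities in the explicit test: `K` stacks the subset projection operators plus one gradient operator, and `prob = [1/(2*subsets)]*(len(K)-1) + [1/2]` gives each data block probability 1/20 while the gradient/regulariser block is drawn half of the time. A quick arithmetic check (plain Python; the values mirror the test, but the snippet itself is illustrative only):

```python
subsets = 10
n_blocks = subsets + 1                       # len(K): 10 data blocks + 1 gradient block
prob = [1 / (2 * subsets)] * (n_blocks - 1) + [1 / 2]

assert len(prob) == n_blocks
assert abs(sum(prob) - 1.0) < 1e-12          # 10 * 0.05 + 0.5 == 1.0
print(prob[0], prob[-1])                     # 0.05 0.5
```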


class TestCallbacks(unittest.TestCase):
