diff --git a/reconstructSPI/iterative_refinement/expectation_maximization.py b/reconstructSPI/iterative_refinement/expectation_maximization.py
index 6f83445..ebaa780 100644
--- a/reconstructSPI/iterative_refinement/expectation_maximization.py
+++ b/reconstructSPI/iterative_refinement/expectation_maximization.py
@@ -223,7 +223,7 @@ def normalize_map(map_3d, counts, norm_const):
             Shape (n_pix, n_pix, n_pix)
             map normalized by counts.
         """
-        return map_3d * counts / (norm_const + counts ** 2)
+        return map_3d * counts / (norm_const + counts**2)
 
     @staticmethod
     def apply_noise_model(map_3d_f_norm_1, map_3d_f_norm_2):
@@ -371,7 +371,7 @@ def generate_slices(map_3d_f, xy_plane, n_pix, rots):
         map_3d_f = np.ones_like(map_3d_f)
         xyz_rotated = np.ones_like(xy_plane)
 
-        size = n_rotations * n_pix ** 2
+        size = n_rotations * n_pix**2
         slices = np.random.normal(size=size)
         slices = slices.reshape((n_rotations, n_pix, n_pix))
         return slices, xyz_rotated
@@ -432,7 +432,7 @@ def compute_bayesian_weights(particle, slices, sigma):
         )
         slices_norm = np.linalg.norm(slices, axis=(1, 2)) ** 2
         particle_norm = np.linalg.norm(particle) ** 2
-        scale = -((2 * sigma ** 2) ** -1)
+        scale = -((2 * sigma**2) ** -1)
         log_bayesian_weights = scale * (slices_norm - 2 * corr_slices_particle)
         offset_safe = log_bayesian_weights.max()
         bayesian_weights = np.exp(log_bayesian_weights - offset_safe)
@@ -550,8 +550,8 @@ def binary_mask_3d(center, radius, shape, fill=True, shell_thickness=1):
         a, b, c = center
         nx0, nx1, nx2 = shape
         x0, x1, x2 = np.ogrid[-a : nx0 - a, -b : nx1 - b, -c : nx2 - c]
-        r2 = x0 ** 2 + x1 ** 2 + x2 ** 2
-        mask = r2 <= radius ** 2
+        r2 = x0**2 + x1**2 + x2**2
+        mask = r2 <= radius**2
         if not fill and radius - shell_thickness > 0:
             mask_outer = mask
             mask_inner = r2 <= (radius - shell_thickness) ** 2