Commit

[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
pre-commit-ci[bot] committed Feb 10, 2025
1 parent 2b965e4 commit f7e5830
Showing 53 changed files with 200 additions and 224 deletions.
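
The changes shown here all follow the same formatter patterns: adjacent string literals that were implicitly concatenated are merged into one literal (with spacing normalized inside f-string replacement fields), and f-strings written with single outer quotes around double-quoted inner expressions are flipped to double outer quotes with single inner quotes. The sketch below only illustrates those patterns with hypothetical standalone variables; the names echo the diff but are not taken from drtsans, and both spellings build identical strings, so the rewrites are purely cosmetic.

# Illustrative sketch only -- hypothetical names, not taken from drtsans.
# Pattern 1: adjacent f-string literals merged into one literal,
#            with spacing normalized inside the replacement field.
index = 3
newly_fitted_peaks = [0.012, 0.034]
message_old = f"spectrum {index-1}: Fitted peaks: " f"{newly_fitted_peaks}\n"
message_new = f"spectrum {index - 1}: Fitted peaks: {newly_fitted_peaks}\n"
assert message_old == message_new  # the rewrite is purely cosmetic

# Pattern 2: outer quotes flipped from single to double and inner quotes from
#            double to single, so the nesting stays legal on any Python 3 version.
raw_flood_ws_name = "flood_12345"
name_old = f'demo_{raw_flood_ws_name.split("flood_")[1]}'
name_new = f"demo_{raw_flood_ws_name.split('flood_')[1]}"
assert name_old == name_new == "demo_12345"
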
@@ -98,7 +98,7 @@ def show_calibration_stage1(raw_flood_ws_name, database_file):
# Plot flood workspace raw and calibrated
print("#####\n\nCompare applying the calibration to flood (stage 1)")

- calibrated_flood_ws_name = f'demo_calibrated1_flood_{raw_flood_ws_name.split("flood_")[1]}'
+ calibrated_flood_ws_name = f"demo_calibrated1_flood_{raw_flood_ws_name.split('flood_')[1]}"
apply_calibrations(
raw_flood_ws_name,
output_workspace=calibrated_flood_ws_name,
5 changes: 2 additions & 3 deletions src/drtsans/auto_wedge.py
@@ -501,8 +501,7 @@ def _estimatePeakParameters(intensity, azimuthal, azimuthal_start, window_half_w
break
# output
print(
f"[WEDGE FIT] azimuthal: {azimuthal_new}, {azimuthal_last} with "
f"left and right as {left_index}, {right_index}"
f"[WEDGE FIT] azimuthal: {azimuthal_new}, {azimuthal_last} with left and right as {left_index}, {right_index}"
)

# now use the first two moments of the data within the window to give an improved center position (first moment)
@@ -837,7 +836,7 @@ def _fitQAndAzimuthal(
fit_result_dict[index]["error"] = error_reason
continue
else:
fitted_peaks_message += f"spectrum {index-1}: Fitted peaks: {newlyFittedPeaks}\n"
fitted_peaks_message += f"spectrum {index - 1}: Fitted peaks: {newlyFittedPeaks}\n"
for i in range(len(peakResults)):
peakResults[i].append(newlyFittedPeaks[i])
q_centers_used.append(q_center)
4 changes: 2 additions & 2 deletions src/drtsans/dataobjects.py
@@ -619,10 +619,10 @@ def __new__(cls, intensity, error, qx, qy, delta_qx=None, delta_qy=None, wavelen

# Sanity check
assert qx.shape == intensity.shape, (
f"qx and intensity must have same shapes. " f"It is not now: {qx.shape} vs {intensity.shape}"
f"qx and intensity must have same shapes. It is not now: {qx.shape} vs {intensity.shape}"
)
assert qy.shape == intensity.shape, (
f"qy and intensity must have same shapes. " f"It is not now: {qy.shape} vs {intensity.shape}"
f"qy and intensity must have same shapes. It is not now: {qy.shape} vs {intensity.shape}"
)

# pass everything to namedtuple
4 changes: 2 additions & 2 deletions src/drtsans/detector.py
@@ -228,7 +228,7 @@ def _detector_first_ws_index(self, first_det_id):
self.first_index = ws_index
break
else:
raise ValueError("Iterared WS and did not find first det id = " "{}".format(first_det_id))
raise ValueError("Iterared WS and did not find first det id = {}".format(first_det_id))

def masked_ws_indices(self):
"""
@@ -256,7 +256,7 @@ def monitor_indices(self):
return np.array([])

def __str__(self):
return "Component: {} with {} pixels (dim x={}, dim y={})." " First index = {}.".format(
return "Component: {} with {} pixels (dim x={}, dim y={}). First index = {}.".format(
self._component_name,
self.dims,
self.dim_x,
6 changes: 2 additions & 4 deletions src/drtsans/determine_bins.py
@@ -31,7 +31,7 @@ def determine_1d_linear_bins(x_min, x_max, bins):
# Check input x min and x max
if x_min is None or x_max is None or x_min >= x_max:
raise RuntimeError(
"x min {} and x max {} must not be None and x min shall be less than x max" "".format(x_min, x_max)
"x min {} and x max {} must not be None and x min shall be less than x max".format(x_min, x_max)
)
# force the number of bins to be an integer and error check it
bins = int(bins)
@@ -104,9 +104,7 @@ def determine_1d_log_bins(x_min, x_max, decade_on_center, n_bins_per_decade=None

# case that is not supported
if decade_on_center:
- assert n_bins_per_decade is not None, (
-     "For option decade_on_center, number of bins per decade " "is required"
- )
+ assert n_bins_per_decade is not None, "For option decade_on_center, number of bins per decade is required"
x_ref = x_min

# calculate bin step size
10 changes: 5 additions & 5 deletions src/drtsans/files/hdf5_rw.py
@@ -79,7 +79,7 @@ def match(self, other_node):
# compare class type
if not isinstance(other_node, type(self)):
raise TypeError(
"Try to match instance of class {} (other) to {} (self)" "".format(type(other_node), type(self))
"Try to match instance of class {} (other) to {} (self)".format(type(other_node), type(self))
)

# compare name
@@ -89,14 +89,14 @@ def match(self, other_node):
# compare attributes
if set(self._attributes.keys()) != set(other_node.attributes.keys()):
print(
"Data node {} Attributes are not same:\nself - other = {}]\nother - self = {}" "".format(
"Data node {} Attributes are not same:\nself - other = {}]\nother - self = {}".format(
self.name,
set(self._attributes.keys()) - set(other_node.attributes.keys()),
set(other_node.attributes.keys()) - set(self._attributes.keys()),
)
)
raise KeyError(
"Data node {} Attributes are not same:\nself - other = {}]\nother - self = {}" "".format(
"Data node {} Attributes are not same:\nself - other = {}]\nother - self = {}".format(
self.name,
set(self._attributes.keys()) - set(other_node.attributes.keys()),
set(other_node.attributes.keys()) - set(self._attributes.keys()),
@@ -107,7 +107,7 @@ def match(self, other_node):
error_msg = ""
for attr_name in self._attributes.keys():
if self._attributes[attr_name] != other_node.attributes[attr_name]:
error_msg += "Mismatch attribute {} value: self = {}, other = {}" "".format(
error_msg += "Mismatch attribute {} value: self = {}, other = {}".format(
attr_name,
self._attributes[attr_name],
other_node.attributes[attr_name],
@@ -186,7 +186,7 @@ def write_attributes(self, curr_entry):
except TypeError as type_error:
print(f"[ERROR] {self._name}-node attribute {attr_name} is of type {type(attr_name)}")
raise TypeError(
f"[ERROR] {self._name}-node attribute {attr_name} is of type " f"{type(attr_name)}: {type_error}"
f"[ERROR] {self._name}-node attribute {attr_name} is of type {type(attr_name)}: {type_error}"
)


4 changes: 2 additions & 2 deletions src/drtsans/files/log_h5_reader.py
@@ -76,7 +76,7 @@ def compare_reduced_iq(test_log_file, gold_log_file, title: str, prefix: str):

# Output error message
if test_exception:
- base_name = f'{prefix}{os.path.basename(test_log_file).split(".")[0]}'
+ base_name = f"{prefix}{os.path.basename(test_log_file).split('.')[0]}"
report_difference(
(test_q_vec, test_intensity_vec),
(gold_q_vec, gold_intensity_vec),
@@ -211,7 +211,7 @@ def verify_cg2_reduction_results(sample_names, output_dir, gold_path, title, pre
try:
compare_reduced_iq(output_log_file, gold_log_file, title_i, prefix)
except AssertionError as unmatched_error:
unmatched_errors = "Testing output {} is different from gold result {}:\n{}" "".format(
unmatched_errors = "Testing output {} is different from gold result {}:\n{}".format(
output_log_file, gold_log_file, unmatched_error
)
# END-FOR
6 changes: 3 additions & 3 deletions src/drtsans/geometry.py
@@ -131,7 +131,7 @@ def bank_workspace_index_range(input_workspace, component=""):
for i in range(input_workspace.getNumberHistograms()):
ids = input_workspace.getSpectrum(i).getDetectorIDs()
if len(ids) > 1:
raise RuntimeError("do not know how to work with more than one " "detector per spectrum ({})".format(ids))
raise RuntimeError("do not know how to work with more than one detector per spectrum ({})".format(ids))
if ids[0] == detector_id_first:
first = i
break
@@ -786,7 +786,7 @@ def translate_sample_by_z(workspace, z):
)
workspace = mtd[ws_name]
logger.debug(
"Instrument sample position is moved to {}" "".format(workspace.getInstrument().getSample().getPos())
"Instrument sample position is moved to {}".format(workspace.getInstrument().getSample().getPos())
)

# update the appropriate log
@@ -836,7 +836,7 @@ def translate_detector_by_z(input_workspace, z=None, relative=True):
update_log = True
if (not relative) or (z != 0.0):
logger.debug(
"Moving detector along Z = {} is relative = {} to component {}" "".format(
"Moving detector along Z = {} is relative = {} to component {}".format(
z, relative, main_detector_name(input_workspace)
)
)
4 changes: 2 additions & 2 deletions src/drtsans/iq.py
@@ -117,14 +117,14 @@ def valid_wedge(min_angle, max_angle) -> List[Tuple[float, float]]:
if diff < 180.0:
return [(min_angle, max_angle)]
raise ValueError(
"wedge angle is greater than 180 degrees: {:.1f} - {:.1f} = {:.1f} < 180" "".format(
"wedge angle is greater than 180 degrees: {:.1f} - {:.1f} = {:.1f} < 180".format(
max_angle, min_angle, diff
)
)
diff = min_angle - max_angle
if diff <= 180:
raise ValueError(
"wedge angle is greater than 180 degrees: {:.1f} - {:.1f} = {:.1f} <= 180" "".format(
"wedge angle is greater than 180 degrees: {:.1f} - {:.1f} = {:.1f} <= 180".format(
min_angle, max_angle, diff
)
)
2 changes: 1 addition & 1 deletion src/drtsans/momentum_transfer.py
@@ -83,7 +83,7 @@ def convert_to_q(ws, mode, resolution_function=None, **kwargs):
wsh = mtd[str(ws)]
if wsh.getAxis(0).getUnit().unitID() != "Wavelength":
raise RuntimeError(
"Input workspace {} for calculate Q and resolution must be in unit Wavelength but not {}" "".format(
"Input workspace {} for calculate Q and resolution must be in unit Wavelength but not {}".format(
wsh, wsh.getAxis(0).getUnit().unitID()
)
)
6 changes: 3 additions & 3 deletions src/drtsans/mono/biosans/api.py
@@ -1516,8 +1516,8 @@ def _prepare_sample_transmission_ws(_sample_transmission):
if debug_output:
from mantid.simpleapi import SaveNexusProcessed

- main_name = f'{form_output_name(processed_data_main).split(".")[0]}.nxs'
- wing_name = f'{form_output_name(processed_data_wing).split(".")[0]}.nxs'
+ main_name = f"{form_output_name(processed_data_main).split('.')[0]}.nxs"
+ wing_name = f"{form_output_name(processed_data_wing).split('.')[0]}.nxs"
# remove history to write less data and speed up I/O
if reduction_config["removeAlgorithmHistory"]:
RemoveWorkspaceHistory(processed_data_main)
@@ -1536,7 +1536,7 @@ def _prepare_sample_transmission_ws(_sample_transmission):
backend="mpl",
) # , imshow_kwargs={'norm': LogNorm(vmin=1)})
if reduction_config["has_midrange_detector"]:
- midrange_name = f'{form_output_name(processed_data_midrange).split(".")[0]}.nxs'
+ midrange_name = f"{form_output_name(processed_data_midrange).split('.')[0]}.nxs"
# remove history to write less data and speed up I/O
if reduction_config["removeAlgorithmHistory"]:
RemoveWorkspaceHistory(processed_data_midrange)
4 changes: 2 additions & 2 deletions src/drtsans/mono/biosans/cg3_spice_to_nexus.py
@@ -126,7 +126,7 @@ def get_pid_range(self, bank_id):

# Check input valid
if bank_id < 1 or bank_id > self.num_banks:
raise RuntimeError(f"CG3 (BioSANS) has 88 banks indexed from 1 to 88. " f"Bank {bank_id} is out of range.")
raise RuntimeError(f"CG3 (BioSANS) has 88 banks indexed from 1 to 88. Bank {bank_id} is out of range.")

# calculate starting PID
if bank_id <= 24:
@@ -212,7 +212,7 @@ def convert_spice_to_nexus(
os.mkdir(output_dir)
except (OSError, IOError) as dir_err:
raise RuntimeError(
f"Output directory {output_dir} doesn't exist." f"Unable to create {output_dir} due to {dir_err}"
f"Output directory {output_dir} doesn't exist.Unable to create {output_dir} due to {dir_err}"
)

# output file name
4 changes: 2 additions & 2 deletions src/drtsans/mono/convert_xml_to_nexus.py
@@ -224,7 +224,7 @@ def mask_spice_detector_pixels(self, pixel_index_list: List[int]):
self._spice_detector_counts[pid] = 0
except IndexError as index_error:
raise RuntimeError(
f"Pixel ID {pid} is out of range {self._spice_detector_counts.shape}. " f"FYI: {index_error}"
f"Pixel ID {pid} is out of range {self._spice_detector_counts.shape}. FYI: {index_error}"
)

@staticmethod
Expand Down Expand Up @@ -293,7 +293,7 @@ def _retrieve_meta_data(spice_file_name, das_spice_log_map):
# check unit
if unit != default_unit:
raise RuntimeError(
f"SPICE log {spice_log_name} has unit {unit} different from " f"expected {default_unit}"
f"SPICE log {spice_log_name} has unit {unit} different from expected {default_unit}"
)

das_log_values[nexus_log_name] = value, unit
12 changes: 4 additions & 8 deletions src/drtsans/mono/gpsans/api.py
@@ -321,9 +321,7 @@ def load_all_files(
smearing_pixel_size_y=smearing_pixel_size_y_dict[meta_data.SAMPLE],
)
logger.information(
"[META] Wavelength range is from {} to {}" "".format(
mtd[ws_name].readX(0)[0], mtd[ws_name].readX(0)[1]
)
"[META] Wavelength range is from {} to {}".format(mtd[ws_name].readX(0)[0], mtd[ws_name].readX(0)[1])
)
# Apply mask
for btp_params in default_mask:
@@ -820,7 +818,7 @@ def prepare_data_workspaces(
mask_btp = dict()
if debug:
# output masking information
logger.notice(f"mask panel: {mask_panel}\n" f"mask ws : {str(mask_ws)}\n" f"mask btp : {mask_btp}")
logger.notice(f"mask panel: {mask_panel}\nmask ws : {str(mask_ws)}\nmask btp : {mask_btp}")
if mask_ws is not None:
SaveNexusProcessed(
InputWorkspace=mask_ws,
@@ -1162,7 +1160,7 @@ def reduce_single_configuration(loaded_ws, reduction_input, prefix="", skip_nan=
}
# auto-aniso returns all of the wedges
symmetric_wedges = False
- logger.debug(f'Wedge peak search window size factor: {autoWedgeOpts["peak_search_window_size_factor"]}')
+ logger.debug(f"Wedge peak search window size factor: {autoWedgeOpts['peak_search_window_size_factor']}")

fbc_options = fbc_options_json(reduction_input)
xc, yc, fit_results = find_beam_center(loaded_ws.center, **fbc_options)
@@ -1313,9 +1311,7 @@ def reduce_single_configuration(loaded_ws, reduction_input, prefix="", skip_nan=
logger.notice(f"Auto wedge options: {autoWedgeOpts}")
autoWedgeOpts["debug_dir"] = output_dir
wedges = getWedgeSelection(iq2d_main_in, **autoWedgeOpts)
- logger.notice(
-     f"found wedge angles:\n" f" peak: {wedges[0]}\n" f" background: {wedges[1]}"
- )
+ logger.notice(f"found wedge angles:\n peak: {wedges[0]}\n background: {wedges[1]}")
# sanity check
assert len(wedges) == 2, f"Auto-wedges {wedges} shall have 2 2-tuples"

2 changes: 1 addition & 1 deletion src/drtsans/mono/gpsans/cg2_spice_to_nexus.py
@@ -43,7 +43,7 @@ def get_pid_range(self, bank_id):
"""
# Check input valid
if bank_id < 1 or bank_id > 48:
raise RuntimeError(f"CG2 (GP-SANS) has 88 banks indexed from 1 to 48. " f"Bank {bank_id} is out of range.")
raise RuntimeError(f"CG2 (GP-SANS) has 88 banks indexed from 1 to 48. Bank {bank_id} is out of range.")

# calculate starting PID
if bank_id <= 24:
8 changes: 4 additions & 4 deletions src/drtsans/mono/load.py
@@ -296,7 +296,7 @@ def set_sample_detector_position(
raise RuntimeError(
f"Workspace {str(ws)}: after loading and initial setup, DAS SDD ({das_sdd})"
f"is not equal to calculated/real SDD ({real_sdd}) by proportion as "
f"{abs(das_sdd - real_sdd)/das_sdd}"
f"{abs(das_sdd - real_sdd) / das_sdd}"
)

# Get original sample detector distance: find expected SDD for further verification
@@ -318,7 +318,7 @@ def set_sample_detector_position(
f"Prior to any geometry correction:\n"
f"Sample to detector distance = {sample_detector_distance(ws, search_logs=False)}"
f"(calculated) vs {sample_detector_distance(ws, search_logs=True)} (meta) mm.\n"
- f' SampleToSi = {logs.find_log_with_units(sample_to_si_window_name, unit="mm")} mm\n'
+ f" SampleToSi = {logs.find_log_with_units(sample_to_si_window_name, unit='mm')} mm\n"
f"Overwrite Values = {sample_si_window_overwrite_value}, "
f"{sample_detector_distance_overwrite_value}\n"
)
@@ -346,7 +346,7 @@ def set_sample_detector_position(

# Check current instrument setup and meta data (sample logs)
logger.notice(
"{} Sample to detector distance = {} (calculated) vs {} (meta) mm" "".format(
"{} Sample to detector distance = {} (calculated) vs {} (meta) mm".format(
str(ws),
sample_detector_distance(ws, search_logs=False),
sample_detector_distance(ws, search_logs=True),
@@ -363,7 +363,7 @@ def set_sample_detector_position(
prior_geom_info += (
f"Result from geometry operation:\n"
f"Sample position = {ws.getInstrument().getSample().getPos()}\n"
- f'SampleToSi = {logs.find_log_with_units(sample_to_si_window_name, unit="mm")}'
+ f"SampleToSi = {logs.find_log_with_units(sample_to_si_window_name, unit='mm')}"
f"mm (From Log)\n"
)
# add detector information