
Fix failing CI for CPU-only tests #329

Closed
wants to merge 2 commits

Update conda env file for CPU-only tests CI
bdb5fc8
GitHub Actions / Black failed May 13, 2024 in 0s

Black found 146 errors
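
Nearly all of the failures below are the same two normalizations: single-quoted strings rewritten to double quotes, and hand-aligned hanging indents reflowed to 4-space indents. A minimal sketch of reproducing one of the fixes with Black's Python API (assuming black is installed locally; the CI may pin a different Black version, so output can vary slightly between releases):

import black

# The method2 dict from pipeline_cpu1.py as it looked before the fix
src = """method2 = {
    'method': "minus_log",
    'module_path': "tomopy.prep.normalize",
    'parameters' : {},
                }
"""
# format_str applies the same rules the CI runs via the Black action:
# double quotes, no space before ':', 4-space hanging indents
print(black.format_str(src, mode=black.Mode()))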

Annotations

Check failure on line 15 in /home/runner/work/httomo/httomo/conda/recipe/samples/python_templates/pipeline_cpu1.py

/home/runner/work/httomo/httomo/conda/recipe/samples/python_templates/pipeline_cpu1.py#L3-L15

 MethodConfig: TypeAlias = Dict[str, Dict[str, Any]]
 PipelineStageConfig: TypeAlias = List[MethodConfig]
 PipelineConfig: TypeAlias = List[PipelineStageConfig]
 
 # NOTE: when creating a Pythonic pipeline, please use
-# the function's name "methods_to_list" so it will be 
+# the function's name "methods_to_list" so it will be
 # found by the loader
+
 
 def methods_to_list() -> PipelineConfig:
     """Pythonic way to build a list of tasks
     from which Pipeline can be generated in Httomo.
     This accompaniments the YAML interface.

Check failure on line 91 in /home/runner/work/httomo/httomo/conda/recipe/samples/python_templates/pipeline_cpu1.py

/home/runner/work/httomo/httomo/conda/recipe/samples/python_templates/pipeline_cpu1.py#L16-L91

     Returns:
         PipelineConfig: A list of tasks in "full_pipeline_list" that can be executed in Httomo.
     """
     full_pipeline_list = []
     loader = {
-        'method': "standard_tomo",
-        'module_path': "httomo.data.hdf.loaders",
-        'parameters' : {
-                     'name': 'tomo',
-                     'data_path': 'entry1/tomo_entry/data/data',
-                     'image_key_path': 'entry1/tomo_entry/instrument/detector/image_key',
-                     'rotation_angles': {"data_path": "/entry1/tomo_entry/data/rotation_angle"},
-                     'dimension': 1,
-                     'pad': 0,
-                       },
-                    }
+        "method": "standard_tomo",
+        "module_path": "httomo.data.hdf.loaders",
+        "parameters": {
+            "name": "tomo",
+            "data_path": "entry1/tomo_entry/data/data",
+            "image_key_path": "entry1/tomo_entry/instrument/detector/image_key",
+            "rotation_angles": {"data_path": "/entry1/tomo_entry/data/rotation_angle"},
+            "dimension": 1,
+            "pad": 0,
+        },
+    }
     full_pipeline_list.append(loader)
     method1 = {
-        'method': "normalize",
-        'module_path': "tomopy.prep.normalize",
-        'parameters' : {
-                     'cutoff': None,
-                       },
-                    }
+        "method": "normalize",
+        "module_path": "tomopy.prep.normalize",
+        "parameters": {
+            "cutoff": None,
+        },
+    }
     full_pipeline_list.append(method1)
     method2 = {
-        'method': "minus_log",
-        'module_path': "tomopy.prep.normalize",
-        'parameters' : {},
-                    }
+        "method": "minus_log",
+        "module_path": "tomopy.prep.normalize",
+        "parameters": {},
+    }
     full_pipeline_list.append(method2)
     method3 = {
-        'method': "find_center_vo",
-        'module_path': "tomopy.recon.rotation",
-        'id': "centering",
-        'parameters' : {
-                     'ind': "mid",
-                     'smin': -50,
-                     'smax': 50,
-                     'srad': 6,
-                     'step': 0.25,
-                     'ratio': 0.5,
-                     'drop': 20,
-                       },
-        'side_outputs': {"cor": "centre_of_rotation"},
-                    }
+        "method": "find_center_vo",
+        "module_path": "tomopy.recon.rotation",
+        "id": "centering",
+        "parameters": {
+            "ind": "mid",
+            "smin": -50,
+            "smax": 50,
+            "srad": 6,
+            "step": 0.25,
+            "ratio": 0.5,
+            "drop": 20,
+        },
+        "side_outputs": {"cor": "centre_of_rotation"},
+    }
     full_pipeline_list.append(method3)
     method4 = {
-        'method': "recon",
-        'module_path': "tomopy.recon.algorithm",
-        'parameters' : {
-                     'center': "${{centering.side_outputs.centre_of_rotation}}",
-                     'sinogram_order': False,
-                     'algorithm': "gridrec",
-                     'init_recon': None,
-                       },
-                    }
+        "method": "recon",
+        "module_path": "tomopy.recon.algorithm",
+        "parameters": {
+            "center": "${{centering.side_outputs.centre_of_rotation}}",
+            "sinogram_order": False,
+            "algorithm": "gridrec",
+            "init_recon": None,
+        },
+    }
     full_pipeline_list.append(method4)
     method5 = {
-        'method': "save_to_images",
-        'module_path': "httomolib.misc.images",
-        'parameters' : {
-                     'subfolder_name': "images",
-                     'axis': 1,
-                     'file_format': "tif",
-                     'bits': 8,
-                     'perc_range_min': 0.0,
-                     'perc_range_max': 100.0,
-                     'jpeg_quality': 95,
-                       },
-                    }
+        "method": "save_to_images",
+        "module_path": "httomolib.misc.images",
+        "parameters": {
+            "subfolder_name": "images",
+            "axis": 1,
+            "file_format": "tif",
+            "bits": 8,
+            "perc_range_min": 0.0,
+            "perc_range_max": 100.0,
+            "jpeg_quality": 95,
+        },
+    }
     full_pipeline_list.append(method5)
 
     return full_pipeline_list
-    
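
For context, the template being reformatted builds the pipeline as a plain list of dicts, one per method. A self-contained sketch of that structure, with two stages copied from the diff above (not the full template):

from typing import Any, Dict, List

# Each stage names the method, the module it is imported from, and its
# parameters, the same keys used by the loader and method dicts above.
pipeline: List[Dict[str, Any]] = [
    {
        "method": "normalize",
        "module_path": "tomopy.prep.normalize",
        "parameters": {"cutoff": None},
    },
    {
        "method": "minus_log",
        "module_path": "tomopy.prep.normalize",
        "parameters": {},
    },
]

for stage in pipeline:
    print(f"{stage['module_path']}.{stage['method']}")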

Check failure on line 100 in /home/runner/work/httomo/httomo/httomo/common.py

/home/runner/work/httomo/httomo/httomo/common.py#L86-L100

 
 @dataclass
 class PlatformSection:
     """
     Data class to represent a section of the pipeline. Section can combine methods
-    if they run on the same platform (cpu or gpu) and have the same pattern. 
+    if they run on the same platform (cpu or gpu) and have the same pattern.
     The sections can be further divided if necessary if the results of the method
-    needed to be saved. 
-    NOTE: More fine division of sections into subsections will slow down 
+    needed to be saved.
+    NOTE: More fine division of sections into subsections will slow down
     the pipeline.
 
     Mainly used to iterate through GPU memory in chunks.
 
     Attributes

Check failure on line 154 in /home/runner/work/httomo/httomo/httomo/common.py

/home/runner/work/httomo/httomo/httomo/common.py#L136-L154

         Dict containing extra params unrelated to wrapped packages but related to httomo
     save_result : bool
         save the result into intermediate dataset
     task_idx: int
         Index of the local task in the section being run
-    task_idx_global: int 
+    task_idx_global: int
         Index of the global task (method) in the pipeline
     package_name: str
         The name of the package the method is imported from
     method_name: str
         The name of the method being executed
     global_statistics: bool
-        Whether global statistics needs to be calculated on the output of the method.        
+        Whether global statistics needs to be calculated on the output of the method.
     """
 
     dict_params_method: Dict[str, Any] = field(default_factory=dict)
     data_in: str = field(default_factory=str)
     data_out: Union[str, List[str]] = field(default_factory=str)

Check failure on line 174 in /home/runner/work/httomo/httomo/httomo/common.py

/home/runner/work/httomo/httomo/httomo/common.py#L164-L174

 class PreProcessInfo:
     """
     Class holding execution info for each method in the pre-processing stage
     of the pipeline
     """
+
     params: Dict[str, Any]
     method_name: str
     module_path: str
     wrapper_func: Callable
 

Check failure on line 184 in /home/runner/work/httomo/httomo/httomo/common.py

/home/runner/work/httomo/httomo/httomo/common.py#L175-L184

 @dataclass
 class LoaderInfo:
     """
     Class holding execution info for the loader
     """
+
     params: Dict[str, Any]
     method_name: str
     method_func: Callable
     pattern: Pattern

Check failure on line 47 in /home/runner/work/httomo/httomo/httomo/cli.py

/home/runner/work/httomo/httomo/httomo/cli.py#L36-L47

     "yaml_config", type=click.Path(exists=True, dir_okay=False, path_type=Path)
 )
 @click.argument(
     "in_data_file",
     type=click.Path(exists=True, dir_okay=False, path_type=Path),
-    required=False, default=None,
+    required=False,
+    default=None,
 )
 def check(yaml_config: Path, in_data_file: Path = None):
     """Check a YAML pipeline file for errors."""
     in_data = str(in_data_file) if isinstance(in_data_file, PurePath) else None
     return validate_yaml_config(yaml_config, in_data)
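
Black's change here only splits the keyword arguments onto one line each. For reference, a minimal standalone click command using the same optional-positional-argument pattern (hypothetical command and file names, not httomo's actual CLI):

import click


@click.command()
@click.argument("config_file", type=click.Path(exists=True))
@click.argument(
    "data_file",
    type=click.Path(exists=True),
    required=False,
    default=None,
)
def check(config_file, data_file=None):
    """Validate CONFIG_FILE, optionally against DATA_FILE."""
    click.echo(f"config={config_file}, data={data_file}")


if __name__ == "__main__":
    check()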

Check failure on line 107 in /home/runner/work/httomo/httomo/httomo/cli.py

/home/runner/work/httomo/httomo/httomo/cli.py#L76-L107

 )
 @click.option(
     "--max-cpu-slices",
     type=click.INT,
     default=64,
-    help="Maximum number of slices to use for a block for CPU-only sections (default: 64)"
+    help="Maximum number of slices to use for a block for CPU-only sections (default: 64)",
 )
 @click.option(
     "--max-memory",
     type=click.STRING,
     default="0",
-    help="Limit the amount of memory used by the pipeline to the given memory (supports strings like 3.2G or bytes)"
+    help="Limit the amount of memory used by the pipeline to the given memory (supports strings like 3.2G or bytes)",
 )
 @click.option(
     "--monitor",
     type=click.STRING,
     multiple=True,
     default=[],
-    help=("Add monitor to the runner (can be given multiple times). " +
-          f"Available monitors: {', '.join(MONITORS_MAP.keys())}")
+    help=(
+        "Add monitor to the runner (can be given multiple times). "
+        + f"Available monitors: {', '.join(MONITORS_MAP.keys())}"
+    ),
 )
 @click.option(
     "--monitor-output",
-    type=click.File('w'),
+    type=click.File("w"),
     default=sys.stdout,
-    help="File to store the monitoring output. Defaults to '-', which denotes stdout"
+    help="File to store the monitoring output. Defaults to '-', which denotes stdout",
 )
 def run(
     in_data_file: Path,
     yaml_config: Path,
     out_dir: Path,

Check failure on line 75 in /home/runner/work/httomo/httomo/httomo/data/hdf/_utils/reslice.py

/home/runner/work/httomo/httomo/httomo/data/hdf/_utils/reslice.py#L64-L75

     # and we concatenate them again across the resliced dimension
     new_data = numpy.concatenate(
         mpiutil.alltoall(to_scatter), axis=current_slice_dim - 1
     )
 
-    start_idx = 0 if comm.rank == 0 else split_indices[comm.rank-1]
+    start_idx = 0 if comm.rank == 0 else split_indices[comm.rank - 1]
     return new_data, next_slice_dim, start_idx
 
 
 def reslice_filebased(
     data: numpy.ndarray,

Check failure on line 107 in /home/runner/work/httomo/httomo/httomo/data/hdf/_utils/reslice.py

/home/runner/work/httomo/httomo/httomo/data/hdf/_utils/reslice.py#L96-L107

     angles : ndarray
         Angles of the loaded dataset.
     detector_x : int
         det_x (horizontal) detector of the loaded dataset.
     detector_y : int
-        det_y (vertical) detector of the loaded dataset.        
+        det_y (vertical) detector of the loaded dataset.
     comm : Comm
         The MPI communicator to be used.
     Returns:
     tuple[numpy.ndarray, int, int]:
         A tuple containing the resliced data and the dimension along which it is

Check failure on line 123 in /home/runner/work/httomo/httomo/httomo/data/hdf/_utils/reslice.py

/home/runner/work/httomo/httomo/httomo/data/hdf/_utils/reslice.py#L112-L123

 
     # Calculate the chunk size for the resliced data
     slices_no_in_chunks = 1
     chunks_data = list(data_shape)
     chunks_data[next_slice_dim - 1] = slices_no_in_chunks
-    
+
     log_once(
         "<-------Reslicing/rechunking the data-------->",
         level=logging.DEBUG,
     )
     # Pass the current slicing dim so then data can be gathered and assembled

Check failure on line 146 in /home/runner/work/httomo/httomo/httomo/data/hdf/_utils/reslice.py

/home/runner/work/httomo/httomo/httomo/data/hdf/_utils/reslice.py#L136-L146

         f"{reslice_dir}/intermediate.h5", next_slice_dim, "/data", comm=comm
     )
 
     return data, next_slice_dim, start_idx
 
+
 def single_sino_reslice(
     data: numpy.ndarray,
     idx: int,
 ) -> Optional[numpy.ndarray]:
     if mpiutil.size == 1:

Check failure on line 168 in /home/runner/work/httomo/httomo/httomo/data/hdf/_utils/reslice.py

/home/runner/work/httomo/httomo/httomo/data/hdf/_utils/reslice.py#L157-L168

     data_shape = chunk.get_data_shape(data, 0)
 
     if mpiutil.rank == 0:
         # Define the numpy array that will hold the single sinogram that has
         # been gathered from data from all MPI processes
-        recvbuf = numpy.empty(data_shape[0]*data_shape[2], dtype=NUMPY_DTYPE)
+        recvbuf = numpy.empty(data_shape[0] * data_shape[2], dtype=NUMPY_DTYPE)
     else:
         recvbuf = None
     # From the full projections that an MPI process has, send the data that
     # contributes to the sinogram at height `idx` (ie, send a "partial
     # sinogram")

Check failure on line 182 in /home/runner/work/httomo/httomo/httomo/data/hdf/_utils/reslice.py

/home/runner/work/httomo/httomo/httomo/data/hdf/_utils/reslice.py#L169-L182

         data[:, idx, :].reshape(data[:, idx, :].size), dtype=NUMPY_DTYPE
     )
     sizes_rec = mpiutil.comm.gather(sendbuf.size)
     # Gather the data into the rank 0 process
     mpiutil.comm.Gatherv(
-        (sendbuf, data.shape[0]*data.shape[2], MPI_DTYPE),
+        (sendbuf, data.shape[0] * data.shape[2], MPI_DTYPE),
         (recvbuf, sizes_rec, MPI_DTYPE),
-        root=0
+        root=0,
     )
 
     if mpiutil.rank == 0:
         assert recvbuf is not None
         return recvbuf.reshape((data_shape[0], data_shape[2]))

Check failure on line 8 in /home/runner/work/httomo/httomo/httomo/globals.py

/home/runner/work/httomo/httomo/httomo/globals.py#L1-L8

 import os
 from pathlib import Path
 
-run_out_dir: os.PathLike = Path('.')
+run_out_dir: os.PathLike = Path(".")
 gpu_id: int = -1
 # maximum slices to use in CPU-only section
 MAX_CPU_SLICES: int = 64

Check failure on line 17 in /home/runner/work/httomo/httomo/httomo/logger.py

/home/runner/work/httomo/httomo/httomo/logger.py#L9-L17

     verbose_logfile_path = out_path / "debug.log"
     logger.remove(0)
     # Concise logs displayed in terminal
     logger.add(sink=sys.stdout, level="INFO", colorize=True, format="{message}")
     # Concise logs written to file
-    logger.add(sink=concise_logfile_path, level="INFO", colorize=False, format="{message}")
+    logger.add(
+        sink=concise_logfile_path, level="INFO", colorize=False, format="{message}"
+    )
     # Verbose logs written to file
     logger.add(sink=verbose_logfile_path, level="DEBUG", colorize=False, enqueue=True)

Check failure on line 133 in /home/runner/work/httomo/httomo/httomo/data/mpiutil.py

/home/runner/work/httomo/httomo/httomo/data/mpiutil.py#L108-L133

 
     # create a new contiguous MPI datatype by repeating the input type by this common length
     factor = (
         arrays[0].shape[0]
         if dim0equal
-        else arrays[0].shape[1]
-        if dim1equal
-        else arrays[0].shape[2]
+        else arrays[0].shape[1] if dim1equal else arrays[0].shape[2]
     )
     dtype1 = dtype.Create_contiguous(factor).Commit()
     # sanity check - this should always pass
     assert all(s % factor == 0 for s in sizes_send), "Size does not divide evenly"
     assert all(s % factor == 0 for s in sizes_rec), "Size does not divide evenly"
     sizes_send1 = [s // factor for s in sizes_send]
     sizes_rec1 = [s // factor for s in sizes_rec]
 
     # now send the same data, but with the adjusted size+datatype (output is identical)
-    comm.Alltoallv(
-        (fullinput, sizes_send1, dtype1), (fulloutput, sizes_rec1, dtype1)
-    )
+    comm.Alltoallv((fullinput, sizes_send1, dtype1), (fulloutput, sizes_rec1, dtype1))
 
     # build list of output arrays
     cumsizes = np.cumsum(sizes_rec)
     cumsizes = [0, *cumsizes[:-1]]
     ret = list()
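
The change above is purely Black's line-breaking, but the code it touches uses a classic mpi4py trick worth noting: repeating the input type by the common dimension length with Create_contiguous means one derived element covers `factor` scalars, so all Alltoallv counts divide by that factor. A standalone sketch of the same idea (hypothetical sizes, not httomo's code):

from mpi4py import MPI
import numpy as np

comm = MPI.COMM_WORLD
factor = 4  # the common dimension length shared by all arrays

# One derived element now covers `factor` floats, so per-rank counts
# shrink from `factor` scalars to a single batched element.
batched = MPI.FLOAT.Create_contiguous(factor).Commit()

send = np.arange(factor * comm.size, dtype=np.float32)
recv = np.empty_like(send)
counts = [1] * comm.size
comm.Alltoallv((send, counts, batched), (recv, counts, batched))
batched.Free()

Run with, e.g., mpirun -n 4; the received buffer is identical to a plain Alltoallv whose per-rank counts equal factor.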

Check failure on line 23 in /home/runner/work/httomo/httomo/httomo/data/hdf/loaders.py

/home/runner/work/httomo/httomo/httomo/data/hdf/loaders.py#L13-L23

 
 
 __all__ = [
     "standard_tomo",
 ]
+
 
 @dataclass
 class LoaderData:
     data: ndarray
     flats: ndarray

Check failure on line 28 in /home/runner/work/httomo/httomo/httomo/method_wrappers/reconstruction.py

/home/runner/work/httomo/httomo/httomo/method_wrappers/reconstruction.py#L16-L28

     def should_select_this_class(cls, module_path: str, method_name: str) -> bool:
         return module_path.endswith(".algorithm")
 
     def _preprocess_data(self, block: DataSetBlock) -> DataSetBlock:
         # this is essential for the angles cutting below to be valid
-        assert self.pattern == Pattern.sinogram, "reconstruction methods must be sinogram"
-        
+        assert (
+            self.pattern == Pattern.sinogram
+        ), "reconstruction methods must be sinogram"
+
         # for 360 degrees data the angular dimension will be truncated while angles are not.
         # Truncating angles if the angular dimension has got a different size
         datashape0 = block.data.shape[0]
         if datashape0 != len(block.angles_radians):
             block.angles_radians = block.angles_radians[0:datashape0]

Check failure on line 54 in /home/runner/work/httomo/httomo/httomo/method_wrappers/images.py

/home/runner/work/httomo/httomo/httomo/method_wrappers/images.py#L42-L54

             **kwargs,
         )
         self["out_dir"] = out_dir if out_dir is not None else httomo.globals.run_out_dir
         if "comm_rank" in self.parameters:
             raise ValueError(
-                "save_to_images with the comm_rank parameter is broken. " +
-                "Please upgrade to the latest version, taking an offset parameter"
+                "save_to_images with the comm_rank parameter is broken. "
+                + "Please upgrade to the latest version, taking an offset parameter"
             )
 
     # Images execute is leaving original data on the device where it is,
     # but gives the method a CPU copy of the data.
     def execute(

Check failure on line 70 in /home/runner/work/httomo/httomo/httomo/method_wrappers/images.py

/home/runner/work/httomo/httomo/httomo/method_wrappers/images.py#L59-L70

         if "offset" in self.parameters:
             config_params = {
                 **self._config_params,
                 "offset": block.global_index[_get_slicing_dim(self.pattern) - 1],
             }
-            
+
         args = self._build_kwargs(self._transform_params(config_params), block)
         if block.is_gpu:
             with catchtime() as t:
                 # give method a CPU copy of the data
                 args[self.parameters[0]] = xp.asnumpy(block.data)

Check failure on line 56 in /home/runner/work/httomo/httomo/httomo/method_wrappers/save_intermediate.py

/home/runner/work/httomo/httomo/httomo/method_wrappers/save_intermediate.py#L14-L56

 import h5py
 import numpy as np
 
 
 class SaveIntermediateFilesWrapper(GenericMethodWrapper):
-    
+
     @classmethod
     def should_select_this_class(cls, module_path: str, method_name: str) -> bool:
         return method_name == "save_intermediate_data"
-    
-    def __init__(self, 
-                 method_repository: MethodRepository, 
-                 module_path: str, 
-                 method_name: str, 
-                 comm: Comm, 
-                 save_result: Optional[bool] = None,
-                 output_mapping: Dict[str, str] = {}, 
-                 out_dir: Optional[os.PathLike] = None,
-                 prev_method: Optional[MethodWrapper] = None,
-                 loader: Optional[LoaderInterface] = None,
-                 **kwargs):
-        super().__init__(method_repository, module_path, method_name, comm, save_result, output_mapping, **kwargs)
+
+    def __init__(
+        self,
+        method_repository: MethodRepository,
+        module_path: str,
+        method_name: str,
+        comm: Comm,
+        save_result: Optional[bool] = None,
+        output_mapping: Dict[str, str] = {},
+        out_dir: Optional[os.PathLike] = None,
+        prev_method: Optional[MethodWrapper] = None,
+        loader: Optional[LoaderInterface] = None,
+        **kwargs,
+    ):
+        super().__init__(
+            method_repository,
+            module_path,
+            method_name,
+            comm,
+            save_result,
+            output_mapping,
+            **kwargs,
+        )
         assert loader is not None
         self._loader = loader
         assert prev_method is not None
 
         filename = f"{prev_method.task_id}-{prev_method.package_name}-{prev_method.method_name}"
         if prev_method.recon_algorithm is not None:
             filename += f"-{prev_method.recon_algorithm}"
-        
+
         if out_dir is None:
             out_dir = httomo.globals.run_out_dir
         assert out_dir is not None
-        self._file = h5py.File(f"{out_dir}/{filename}.h5", "w", driver="mpio", comm=comm)
+        self._file = h5py.File(
+            f"{out_dir}/{filename}.h5", "w", driver="mpio", comm=comm
+        )
         # make sure file gets closed properly
         weakref.finalize(self, self._file.close)
-        
+
     def execute(self, block: DataSetBlock) -> DataSetBlock:
         # we overwrite the whole execute method here, as we do not need any of the helper
         # methods from the Generic Wrapper
         # What we know:
         # - we do not transfer the dataset as a whole to CPU - only the data and angles locally

Check failure on line 80 in /home/runner/work/httomo/httomo/httomo/method_wrappers/save_intermediate.py

/home/runner/work/httomo/httomo/httomo/method_wrappers/save_intermediate.py#L70-L80

             path="/data",
             detector_x=self._loader.detector_x,
             detector_y=self._loader.detector_y,
             angles=block.angles,
         )
-        
+
         if block.is_last_in_chunk:
             self._file.close()
 
         return block

Check failure on line 23 in /home/runner/work/httomo/httomo/httomo/methods_database/packages/external/httomolibgpu/supporting_funcs/misc/rescale.py

/home/runner/work/httomo/httomo/httomo/methods_database/packages/external/httomolibgpu/supporting_funcs/misc/rescale.py#L5-L23

     "_calc_memory_bytes_rescale_to_int",
 ]
 
 
 def _calc_memory_bytes_rescale_to_int(
-        non_slice_dims_shape: Tuple[int, int],
-        dtype: np.dtype,
-        **kwargs,
+    non_slice_dims_shape: Tuple[int, int],
+    dtype: np.dtype,
+    **kwargs,
 ) -> Tuple[int, int]:
     bits: int = kwargs["bits"]
     if bits == 8:
         itemsize = 1
     elif bits == 16:
         itemsize = 2
     else:
         itemsize = 4
     safety = 128
-    return (int(np.prod(non_slice_dims_shape)) * (dtype.itemsize + itemsize) + safety, 0)
+    return (
+        int(np.prod(non_slice_dims_shape)) * (dtype.itemsize + itemsize) + safety,
+        0,
+    )
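
The reflowed return statement computes the per-slice memory estimate. A worked check of that formula with hypothetical detector dimensions (numbers are illustrative, not from this log):

import numpy as np

non_slice_dims_shape = (2160, 2560)  # hypothetical detector_y x detector_x
dtype = np.dtype(np.float32)         # 4-byte input pixels
itemsize = 1                         # bits == 8 -> 1-byte output pixels
safety = 128

per_slice = int(np.prod(non_slice_dims_shape)) * (dtype.itemsize + itemsize) + safety
print(per_slice)  # 27648128 bytes, about 26.4 MiB per slice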

Check failure on line 73 in /home/runner/work/httomo/httomo/httomo/method_wrappers/stats_calc.py

/home/runner/work/httomo/httomo/httomo/method_wrappers/stats_calc.py#L47-L73

         self._elements: int = 0
 
     def _transfer_data(self, dataset: DataSetBlock):
         # don't transfer anything (either way) at this point
         return dataset
-    
+
     def _run_method(self, dataset: DataSetBlock, args: Dict[str, Any]) -> DataSetBlock:
-        # transfer data to GPU if we can / have it available (always faster), 
+        # transfer data to GPU if we can / have it available (always faster),
         # but don't want to fail if we don't have a GPU (underlying method works for both)
         # and don't touch original dataset
         if gpu_enabled and dataset.is_cpu:
             with catchtime() as t:
                 args[self._parameters[0]] = xp.asarray(dataset.data)
             self._gpu_time_info.host2device += t.elapsed
         ret = self._method(**args)
         return self._process_return_type(ret, dataset)
-        
 
-    def _process_return_type(
-        self, ret: Any, input_block: DataSetBlock
-    ) -> DataSetBlock:
+    def _process_return_type(self, ret: Any, input_block: DataSetBlock) -> DataSetBlock:
         assert isinstance(ret, tuple), "expected return type is a tuple"
         assert len(ret) == 4, "A 4-tuple of stats values is expected"
 
         self._min = min(self._min, float(ret[0]))
         self._max = max(self._max, float(ret[1]))