Commit

Merge pull request populse#342 from DimitriPapadopoulos/codespell
Fix typos found by codespell
denisri committed Dec 8, 2023
2 parents 03c370e + 0414b49 commit d084072
Showing 7 changed files with 31 additions and 31 deletions.
4 changes: 2 additions & 2 deletions capsul/__main__.py
@@ -157,8 +157,8 @@ def set_executable_cmd_args(executable, args):
from soma.qt_gui.qt_backend import Qt
from capsul.qt_gui.widgets import PipelineDeveloperView

- # WARNING: QApplication should always be instanciated before aims PluginLoader
- # has been called otherwise another QCoreApplication is instanciated
+ # WARNING: QApplication should always be instantiated before aims PluginLoader
+ # has been called otherwise another QCoreApplication is instantiated
# that can conflict with the QApplication created.
Qt.QApplication.setAttribute(Qt.Qt.AA_ShareOpenGLContexts, True)
app = Qt.QApplication(sys.argv)
30 changes: 15 additions & 15 deletions capsul/dataset.py
@@ -711,7 +711,7 @@ def __getitem__(self, key):
super().__setattr__("_item", key)
else:
raise Exception(
"invalid metdata modification, attribute too deep: "
"invalid metadata modification, attribute too deep: "
f"{self._parameter}, {self._item}, {key}"
)
return self
@@ -733,23 +733,23 @@ def value(self):
def __setitem__(self, key, value):
if isinstance(value, MetadataModification):
if self._item:
raise Exception(f"invalid metdata copy, unexpected item: {self._item}")
raise Exception(f"invalid metadata copy, unexpected item: {self._item}")
if not self._parameter:
if not value._parameter:
raise Exception("invalid metdata copy, no source parameter")
raise Exception("invalid metadata copy, no source parameter")
if value._item:
raise Exception(
"invalid metdata copy, source item {value._item} cannot be copied to a whole parameter"
"invalid metadata copy, source item {value._item} cannot be copied to a whole parameter"
)
self._copy_all(
dest_parameters=key,
source_parameters=value._parameter,
)
else:
if not value._parameter:
raise Exception("invalid metdata copy, no source parameter")
raise Exception("invalid metadata copy, no source parameter")
if not value._item:
raise Exception("invalid metdata copy, no source item")
raise Exception("invalid metadata copy, no source item")
self._copy_item(
parameters=self._parameter,
items=key,
@@ -758,10 +758,10 @@ def __setitem__(self, key, value):
)
else:
if not self._parameter:
raise Exception("invalid metdata modification, no parameter")
raise Exception("invalid metadata modification, no parameter")
if self._item:
raise Exception(
f"invalid metdata modification, unexpected item: {self._item}"
f"invalid metadata modification, unexpected item: {self._item}"
)
self._set(
parameters=self._parameter,
@@ -834,9 +834,9 @@ def _items(self, items):

def unused(self, value=True):
if not self._parameter:
raise Exception("invalid metdata modification, no parameter")
raise Exception("invalid metadata modification, no parameter")
if not self._item:
raise Exception("invalid metdata modification, no item")
raise Exception("invalid metadata modification, no item")
if self.executable.activated:
for parameter in self._parameters(self._parameter):
for item in self._items(self._item):
@@ -849,9 +849,9 @@ def used(self, value=True):

def append(self, value, sep="_"):
if not self._parameter:
raise Exception("invalid metdata modification, no parameter")
raise Exception("invalid metadata modification, no parameter")
if not self._item:
raise Exception("invalid metdata modification, no item")
raise Exception("invalid metadata modification, no item")
if self.executable.activated:
if isinstance(value, MetadataModification):
value = value.value()
@@ -871,9 +871,9 @@ def prepend(self, value, sep="_"):

def prepend(self, value, sep="_"):
if not self._parameter:
raise Exception("invalid metdata modification, no parameter")
raise Exception("invalid metadata modification, no parameter")
if not self._item:
raise Exception("invalid metdata modification, no item")
raise Exception("invalid metadata modification, no item")
if isinstance(value, MetadataModification):
value = value.value()
if self.executable.activated:
@@ -1337,7 +1337,7 @@ def path_for_parameters(self, executable, parameters=None):
# print("!-" * 40)
parameters_equivalence = find_parameters_equivalence(executable)
for parameter in parameters:
- # Ignore path generation for parameters that are equivelent to another one
+ # Ignore path generation for parameters that are equivalent to another one
equivalent = parameters_equivalence.get(executable, {}).get(parameter)
if equivalent and equivalent != parameter:
# print(f"!skip! {parameter} => {equivalent}")
14 changes: 7 additions & 7 deletions capsul/engine/__init__.py
@@ -413,21 +413,21 @@ def run(self, executable, timeout=None, print_report=False, debug=False, **kwarg
def prepare_pipeline_for_retry(self, pipeline, execution_id):
"""Modify a pipeline given a previous execution to select only the nodes that
weren't successful. Running the pipeline after this step will retry the
- execution of faile jobs. This method adds (or modify if it exists) an unselectd
- pipeline step called "succesfully_executed" containing all nodes that were
- succesfully executed.
- """ ""
+ execution of failed jobs. This method adds (or modifies if it exists) an
+ unselected pipeline step called "successfully_executed" containing all nodes
+ that were successfully executed.
+ """
successful_nodes = []
for path in self.database.successful_node_paths(self.engine_id, execution_id):
successful_nodes.append(pipeline.node_from_path(path).name)
step_field = None
if pipeline.field("pipeline_steps"):
step_field = pipeline.pipeline_steps.fields("succesfully_executed")
step_field = pipeline.pipeline_steps.fields("successfully_executed")
if step_field is None:
pipeline.add_pipeline_step("succesfully_executed", successful_nodes, False)
pipeline.add_pipeline_step("successfully_executed", successful_nodes, False)
else:
step_field.nodes = successful_nodes
setattr(pipeline.pipeline_steps, "succesfully_executed", False)
setattr(pipeline.pipeline_steps, "successfully_executed", False)


class Workers(Controller):
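As a hedged usage note on the "successfully_executed" step named above, a retry could look like the sketch below. The engine and pipeline objects, and how execution_id is obtained, are assumptions not shown in this diff; only prepare_pipeline_for_retry() and run() appear in the code above::

    # Hedged sketch: re-run only the jobs that failed in a previous execution.
    # "engine", "pipeline" and "execution_id" are assumed to exist from an
    # earlier, partially failed run; the two method calls come from the diff.
    engine.prepare_pipeline_for_retry(pipeline, execution_id)
    # Nodes recorded in the unselected "successfully_executed" step are
    # skipped, so only the previously failed jobs are executed again.
    engine.run(pipeline)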
8 changes: 4 additions & 4 deletions capsul/format.py
@@ -50,7 +50,7 @@ def __init__(self):

def __setitem__(self, label, value):
"""
- Adds a new fromat. Label is the format name that is displayed to
+ Adds a new format. Label is the format name that is displayed to
the user. It must be unique among all format labels independently
of character case. value is a list of extensions or a format object.
"""
@@ -72,7 +72,7 @@ def __setitem__(self, label, value):
def __getitem__(self, label_or_key):
"""
Return a Format instance from its label. Research is case independent.
- A KeyError is raised if the fomat is not found.
+ A KeyError is raised if the format is not found.
"""
return self._formats[label_or_key.lower()]
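As a hedged illustration of the two accessors above, the registry maps a case-insensitive label to a format. In the sketch below, the FormatsList class name and the example label/extensions are assumptions; only the label/extension semantics come from the docstrings shown in this diff::

    # Hedged sketch of the label-based format registry described above.
    formats = FormatsList()                  # assumed container class name
    formats["My image"] = [".img", ".hdr"]   # register a format from its extensions
    fmt = formats["MY IMAGE"]                # lookup is case-insensitive
    # formats["missing"] would raise KeyError for an unknown label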

@@ -537,11 +537,11 @@ def global_formats():
"gz compressed SPM image",
"SPM image",
],
"aims partialy writable volume formats": [
"aims partially writable volume formats": [
"GIS image",
"NIFTI-1 image",
],
"aims partialy readable volume formats": [
"aims partially readable volume formats": [
"GIS image",
"NIFTI-1 image",
"TIFF image",
2 changes: 1 addition & 1 deletion capsul/in_context/__init__.py
@@ -2,7 +2,7 @@
"""
The ``in_context`` module provides functions to call some external software from Capsul processes (SPM, FSL, etc.). The main functions perform calls to the software in a similar way as ``subprocess`` functions (:class:`~subprocess.Popen`, :func:`~subprocess.call`, :func:`~subprocess.check_call` and :func:`subprocess.check_output`).
The notable difference is that they use an :class:`~capsul.execution_context.ExecutionContext` object instance to get configuration from.
- These functions are only run from within the :meth:`~capsul.process.Process.execute` method of a Process, which gets the context as a paremeter::
+ These functions are only run from within the :meth:`~capsul.process.Process.execute` method of a Process, which gets the context as a parameter::
from capsul.api import Process
from capsul.in_context.fsl import fsl_check_call
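The docstring example above is cut short in this view; as a hedged, self-contained sketch of the same pattern (also described in the fsl.py docstring below), it could look like this. The Bet class name and the bet arguments are illustrative assumptions; the imports, the execute(self, execution_context) signature and fsl_check_call() come from the documentation shown in this diff::

    # Hedged sketch: calling an FSL command from a Process execute() method.
    from capsul.api import Process
    from capsul.in_context.fsl import fsl_check_call

    class Bet(Process):  # illustrative process class, not taken from the diff
        def execute(self, execution_context):
            # the first argument is the bare FSL executable name,
            # without any path or Neurodebian-style prefix
            fsl_check_call(["bet", "-h"], execution_context=execution_context)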
2 changes: 1 addition & 1 deletion capsul/in_context/fsl.py
@@ -16,7 +16,7 @@
def execute(self, execution_context):
fsl_check_call(['bet', '-h'], execution_context=execution_context)
- For calling FSL command with this module, the first arguent of
+ For calling FSL command with this module, the first argument of
command line must be the FSL executable without any path nor prefix.
Prefix are used in Neurodebian install. For instance on Ubuntu 16.04
Neurodebian FSL commands are prefixed with "fsl5.0-".
2 changes: 1 addition & 1 deletion capsul/run.py
@@ -116,7 +116,7 @@ def execute_job(database, engine_id, execution_id, job_uuid, debug=False):
if __name__ == "__main__":
if len(sys.argv) != 4:
print(
"Wrong number of paramaters, 3 expected:" f"command={sys.argv}",
"Wrong number of parameters, 3 expected:" f"command={sys.argv}",
file=sys.stderr,
)
sys.exit(1)
