Merge pull request #3671 from DimitriPapadopoulos/B
STY: Apply ruff/flake8-bugbear rules (B)
effigies authored Oct 6, 2024
2 parents 25469f1 + fdab8ef commit 32a711e
Showing 16 changed files with 40 additions and 48 deletions.
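
Background, for readers skimming the diff: the bulk of the changes below appear to apply flake8-bugbear rules B009 and B010, which flag getattr()/setattr() calls whose attribute name is a string literal (plain attribute access is equivalent and clearer); a few files also apply B004, replacing hasattr(x, "__call__") with callable(x). A minimal before/after sketch of the getattr/setattr pattern, using a hypothetical Cfg class rather than code from this repository:

class Cfg:
    pass

cfg = Cfg()

# B010: setattr with a literal attribute name is just an assignment.
setattr(cfg, "output_units", "secs")    # flagged by ruff/flake8-bugbear
cfg.output_units = "secs"               # equivalent, preferred

# B009: getattr with a literal attribute name is plain attribute access.
units = getattr(cfg, "output_units")    # flagged
units = cfg.output_units                # equivalent, preferred

print(units)
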
4 changes: 2 additions & 2 deletions nipype/algorithms/tests/test_modelgen.py
@@ -122,7 +122,7 @@ def test_modelgen_spm_concat(tmpdir):
s = SpecifySPMModel()
s.inputs.input_units = "secs"
s.inputs.concatenate_runs = True
setattr(s.inputs, "output_units", "secs")
s.inputs.output_units = "secs"
assert s.inputs.output_units == "secs"
s.inputs.functional_runs = [filename1, filename2]
s.inputs.time_repetition = 6
@@ -147,7 +147,7 @@ def test_modelgen_spm_concat(tmpdir):
)

# Test case of scans as output units instead of seconds
setattr(s.inputs, "output_units", "scans")
s.inputs.output_units = "scans"
assert s.inputs.output_units == "scans"
s.inputs.subject_info = deepcopy(info)
res = s.run()
4 changes: 2 additions & 2 deletions nipype/interfaces/afni/utils.py
@@ -3244,11 +3244,11 @@ def _run_interface(self, runtime):
for line in runtime.stdout.split("\n")
if line.strip().startswith("GCOR = ")
][-1]
setattr(self, "_gcor", float(gcor_line[len("GCOR = ") :]))
self._gcor = float(gcor_line[len("GCOR = ") :])
return runtime

def _list_outputs(self):
return {"out": getattr(self, "_gcor")}
return {"out": self._gcor}


class AxializeInputSpec(AFNICommandInputSpec):
2 changes: 1 addition & 1 deletion nipype/interfaces/ants/utils.py
@@ -536,7 +536,7 @@ def _format_arg(self, opt, spec, val):
return super()._format_arg(opt, spec, val)

def _list_outputs(self):
return getattr(self, "_output")
return self._output


class AverageAffineTransformInputSpec(ANTSCommandInputSpec):
2 changes: 1 addition & 1 deletion nipype/interfaces/base/support.py
@@ -99,7 +99,7 @@ def __exit__(self, exc_type, exc_value, exc_tb):
traceback.format_exception(exc_type, exc_value, exc_tb)
)
# Gather up the exception arguments and append nipype info.
exc_args = exc_value.args if getattr(exc_value, "args") else tuple()
exc_args = exc_value.args or ()
exc_args += (
f"An exception of type {exc_type.__name__} occurred while "
f"running interface {self._runtime.interface}.",
8 changes: 4 additions & 4 deletions nipype/interfaces/elastix/utils.py
@@ -164,18 +164,18 @@ def _run_interface(self, runtime):

def _list_outputs(self):
outputs = self.output_spec().get()
outputs["output_file"] = getattr(self, "_out_file")
outputs["output_file"] = self._out_file
return outputs

def _get_outfile(self):
val = getattr(self, "_out_file")
val = self._out_file
if val is not None and val != "":
return val

if isdefined(self.inputs.output_file):
setattr(self, "_out_file", self.inputs.output_file)
self._out_file = self.inputs.output_file
return self.inputs.output_file

out_file = op.abspath(op.basename(self.inputs.transform_file))
setattr(self, "_out_file", out_file)
self._out_file = out_file
return out_file
12 changes: 6 additions & 6 deletions nipype/interfaces/fsl/utils.py
@@ -929,7 +929,7 @@ def _run_interface(self, runtime):
float(r) for r in out["translations"].strip().split(" ")
]

setattr(self, "_results", outputs)
self._results = outputs
return runtime

def _list_outputs(self):
@@ -2513,8 +2513,8 @@ def _format_arg(self, name, trait_spec, value):

def _parse_inputs(self, skip=None):
fname, ext = op.splitext(self.inputs.in_coords)
setattr(self, "_in_file", fname)
setattr(self, "_outformat", ext[1:])
self._in_file = fname
self._outformat = ext[1:]
first_args = super()._parse_inputs(skip=["in_coords", "out_file"])

second_args = fname + ".txt"
@@ -2580,11 +2580,11 @@ def _coords_to_trk(self, points, out_file):

def _overload_extension(self, value, name):
if name == "out_file":
return "{}.{}".format(value, getattr(self, "_outformat"))
return "{}.{}".format(value, self._outformat)

def _run_interface(self, runtime):
fname = getattr(self, "_in_file")
outformat = getattr(self, "_outformat")
fname = self._in_file
outformat = self._outformat
tmpfile = None

if outformat == "vtk":
26 changes: 9 additions & 17 deletions nipype/interfaces/spm/model.py
@@ -849,30 +849,22 @@ def _make_matlab_command(self, _):

def aggregate_outputs(self, runtime=None):
outputs = self._outputs()
setattr(outputs, "thresholded_map", self._gen_thresholded_map_filename())
setattr(outputs, "pre_topo_fdr_map", self._gen_pre_topo_map_filename())
outputs.thresholded_map = self._gen_thresholded_map_filename()
outputs.pre_topo_fdr_map = self._gen_pre_topo_map_filename()
for line in runtime.stdout.split("\n"):
if line.startswith("activation_forced = "):
setattr(
outputs,
"activation_forced",
line[len("activation_forced = ") :].strip() == "1",
outputs.activation_forced = (
line[len("activation_forced = ") :].strip() == "1"
)
elif line.startswith("n_clusters = "):
setattr(
outputs, "n_clusters", int(line[len("n_clusters = ") :].strip())
)
outputs.n_clusters = int(line[len("n_clusters = ") :].strip())
elif line.startswith("pre_topo_n_clusters = "):
setattr(
outputs,
"pre_topo_n_clusters",
int(line[len("pre_topo_n_clusters = ") :].strip()),
outputs.pre_topo_n_clusters = int(
line[len("pre_topo_n_clusters = ") :].strip()
)
elif line.startswith("cluster_forming_thr = "):
setattr(
outputs,
"cluster_forming_thr",
float(line[len("cluster_forming_thr = ") :].strip()),
outputs.cluster_forming_thr = float(
line[len("cluster_forming_thr = ") :].strip()
)
return outputs

2 changes: 1 addition & 1 deletion nipype/interfaces/tests/test_io.py
@@ -517,7 +517,7 @@ def test_datasink_copydir_2(_temp_analyze_files, tmpdir):
base_directory=tmpdir.mkdir("basedir").strpath, parameterization=False
)
ds.inputs.remove_dest_dir = True
setattr(ds.inputs, "outdir", pth)
ds.inputs.outdir = pth
ds.run()
sep = os.path.sep
assert not tmpdir.join("basedir", pth.split(sep)[-1], fname).check()
4 changes: 2 additions & 2 deletions nipype/interfaces/utility/wrappers.py
@@ -72,7 +72,7 @@ def __init__(

super().__init__(**inputs)
if function:
if hasattr(function, "__call__"):
if callable(function):
try:
self.inputs.function_str = getsource(function)
except OSError:
@@ -101,7 +101,7 @@ def __init__(

def _set_function_string(self, obj, name, old, new):
if name == "function_str":
if hasattr(new, "__call__"):
if callable(new):
function_source = getsource(new)
fninfo = new.__code__
elif isinstance(new, (str, bytes)):
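
A note on the callable() changes in wrappers.py above (and in debug.py further down): rule B004 flags hasattr(x, "__call__") because that test can be misled by objects with a custom __getattr__, so the builtin callable(x) is the reliable, idiomatic check. A small runnable sketch, not taken from the repository:

def greet(name):
    return f"hello, {name}"

maybe_fn = greet

# B004: hasattr(x, "__call__") is an unreliable callability test ...
if hasattr(maybe_fn, "__call__"):   # flagged
    print(maybe_fn("world"))

# ... callable() gives the same answer here and is robust in the edge cases.
if callable(maybe_fn):              # preferred
    print(maybe_fn("world"))
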
6 changes: 3 additions & 3 deletions nipype/pipeline/engine/tests/test_engine.py
@@ -25,7 +25,7 @@
def test_1mod(iterables, expected):
pipe = pe.Workflow(name="pipe")
mod1 = pe.Node(interface=EngineTestInterface(), name="mod1")
setattr(mod1, "iterables", iterables["1"])
mod1.iterables = iterables["1"]
pipe.add_nodes([mod1])
pipe._flatgraph = pipe._create_flat_graph()
pipe._execgraph = pe.generate_expanded_graph(deepcopy(pipe._flatgraph))
@@ -49,7 +49,7 @@ def test_2mods(iterables, expected):
mod1 = pe.Node(interface=EngineTestInterface(), name="mod1")
mod2 = pe.Node(interface=EngineTestInterface(), name="mod2")
for nr in ["1", "2"]:
setattr(eval("mod" + nr), "iterables", iterables[nr])
eval("mod" + nr).iterables = iterables[nr]
pipe.connect([(mod1, mod2, [("output1", "input2")])])
pipe._flatgraph = pipe._create_flat_graph()
pipe._execgraph = pe.generate_expanded_graph(deepcopy(pipe._flatgraph))
@@ -87,7 +87,7 @@ def test_3mods(iterables, expected, connect):
mod2 = pe.Node(interface=EngineTestInterface(), name="mod2")
mod3 = pe.Node(interface=EngineTestInterface(), name="mod3")
for nr in ["1", "2", "3"]:
setattr(eval("mod" + nr), "iterables", iterables[nr])
eval("mod" + nr).iterables = iterables[nr]
if connect == ("1-2", "2-3"):
pipe.connect(
[
2 changes: 1 addition & 1 deletion nipype/pipeline/engine/utils.py
@@ -852,7 +852,7 @@ def _identity_nodes(graph, include_iterables):
node
for node in nx.topological_sort(graph)
if isinstance(node.interface, IdentityInterface)
and (include_iterables or getattr(node, "iterables") is None)
and (include_iterables or node.iterables is None)
]


2 changes: 1 addition & 1 deletion nipype/pipeline/plugins/debug.py
@@ -15,7 +15,7 @@ def __init__(self, plugin_args=None):
if (
plugin_args
and "callable" in plugin_args
and hasattr(plugin_args["callable"], "__call__")
and callable(plugin_args["callable"])
):
self._callable = plugin_args["callable"]
else:
2 changes: 1 addition & 1 deletion nipype/utils/config.py
@@ -353,7 +353,7 @@ def _mock():

# Older versions of xvfbwrapper used vdisplay_num
if not hasattr(self._display, "new_display"):
setattr(self._display, "new_display", self._display.vdisplay_num)
self._display.new_display = self._display.vdisplay_num
return self.get_display()

def stop_display(self):
6 changes: 3 additions & 3 deletions nipype/utils/profiler.py
@@ -174,9 +174,9 @@ def log_nodes_cb(node, status):
status_dict = {
"name": node.name,
"id": node._id,
"start": getattr(node.result.runtime, "startTime"),
"finish": getattr(node.result.runtime, "endTime"),
"duration": getattr(node.result.runtime, "duration"),
"start": node.result.runtime.startTime,
"finish": node.result.runtime.endTime,
"duration": node.result.runtime.duration,
"runtime_threads": getattr(node.result.runtime, "cpu_percent", "N/A"),
"runtime_memory_gb": getattr(node.result.runtime, "mem_peak_gb", "N/A"),
"estimated_memory_gb": node.mem_gb,
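
Note the distinction in the profiler hunk above: only the two-argument form getattr(obj, "name") is rewritten, while getattr(node.result.runtime, "cpu_percent", "N/A") is left alone, since the three-argument form supplies a default and also guards against the attribute being missing. A tiny sketch with a hypothetical Runtime object, not code from the repository:

class Runtime:
    startTime = "2024-10-06T12:00:00"

rt = Runtime()

# Two-argument getattr with a literal name is plain attribute access (B009).
start = rt.startTime                     # preferred over getattr(rt, "startTime")

# Three-argument getattr keeps its purpose: the default covers a missing attribute.
cpu = getattr(rt, "cpu_percent", "N/A")  # stays as-is in the diff above

print(start, cpu)
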
2 changes: 1 addition & 1 deletion nipype/utils/provenance.py
@@ -300,7 +300,7 @@ def write_provenance(results, filename="provenance", format="all"):
import traceback

err_msg = traceback.format_exc()
if getattr(e, "args"):
if e.args:
err_msg += "\n\nException arguments:\n" + ", ".join(
['"%s"' % arg for arg in e.args]
)
4 changes: 2 additions & 2 deletions tools/checkspecs.py
@@ -287,7 +287,7 @@ def test_specs(self, uri):
continue
parent_metadata = []
if "parent" in trait.__dict__:
parent_metadata = list(getattr(trait, "parent").__dict__.keys())
parent_metadata = list(trait.parent.__dict__)
if (
key
not in allowed_keys
@@ -375,7 +375,7 @@ def test_specs(self, uri):
continue
parent_metadata = []
if "parent" in trait.__dict__:
parent_metadata = list(getattr(trait, "parent").__dict__.keys())
parent_metadata = list(trait.parent.__dict__)
if (
key
not in allowed_keys
