Merge pull request #3680 from DimitriPapadopoulos/C4
 STY: Apply ruff/flake8-comprehensions rules (C4)
effigies authored Oct 6, 2024
2 parents 6ac81ca + 1747356 commit 25469f1
Showing 22 changed files with 46 additions and 55 deletions.
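The same handful of flake8-comprehensions rewrites recurs across every file below. A minimal before/after sketch of those patterns, with invented variable names (not taken from the diff):

rows = [("a", 1), ("b", 2)]
copied = list(rows)                                  # was: [r for r in rows]
lookup = dict(rows)                                  # was: {k: v for k, v in rows}
point = (1, 2, -3)                                   # was: tuple([1, 2, -3])
n_rows = sum(1 for _ in rows)                        # was: sum([1 for _ in rows])
all_str = all(isinstance(k, str) for k, _ in rows)   # was: all([isinstance(k, str) for k, _ in rows])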
2 changes: 1 addition & 1 deletion nipype/algorithms/modelgen.py
@@ -161,7 +161,7 @@ def bids_gen_info(
for bids_event_file in bids_event_files:
with open(bids_event_file) as f:
f_events = csv.DictReader(f, skipinitialspace=True, delimiter="\t")
-events = [{k: v for k, v in row.items()} for row in f_events]
+events = list(f_events)
if not condition_column:
condition_column = "_trial_type"
for i in events:
6 changes: 3 additions & 3 deletions nipype/algorithms/tests/test_CompCor.py
@@ -287,9 +287,9 @@ def run_cc(
components_metadata = [
line.rstrip().split("\t") for line in metadata_file
]
-components_metadata = {
-    i: j for i, j in zip(components_metadata[0], components_metadata[1])
-}
+components_metadata = dict(
+    zip(components_metadata[0], components_metadata[1])
+)
assert components_metadata == expected_metadata

return ccresult
2 changes: 1 addition & 1 deletion nipype/interfaces/ants/segmentation.py
@@ -194,7 +194,7 @@ def _format_arg(self, opt, spec, val):
priors_paths[0] % i for i in range(1, n_classes + 1)
]

-if not all([os.path.exists(p) for p in priors_paths]):
+if not all(os.path.exists(p) for p in priors_paths):
raise FileNotFoundError(
"One or more prior images do not exist: "
"%s." % ", ".join(priors_paths)
2 changes: 1 addition & 1 deletion nipype/interfaces/base/core.py
@@ -486,7 +486,7 @@ def load_inputs_from_json(self, json_file, overwrite=True):
if not overwrite:
def_inputs = list(self.inputs.get_traitsfree().keys())

-new_inputs = list(set(list(inputs_dict.keys())) - set(def_inputs))
+new_inputs = set(inputs_dict) - set(def_inputs)
for key in new_inputs:
if hasattr(self.inputs, key):
setattr(self.inputs, key, inputs_dict[key])
4 changes: 2 additions & 2 deletions nipype/interfaces/cmtk/cmtk.py
@@ -248,7 +248,7 @@ def cmat(
axis=1,
)
)
-G.nodes[int(u)]["dn_position"] = tuple([xyz[0], xyz[2], -xyz[1]])
+G.nodes[int(u)]["dn_position"] = (xyz[0], xyz[2], -xyz[1])

if intersections:
iflogger.info("Filtering tractography from intersections")
@@ -1070,7 +1070,7 @@ def create_nodes(roi_file, resolution_network_file, out_filename):
np.where(np.flipud(roiData) == int(d["dn_correspondence_id"])), axis=1
)
)
-G.nodes[int(u)]["dn_position"] = tuple([xyz[0], xyz[2], -xyz[1]])
+G.nodes[int(u)]["dn_position"] = (xyz[0], xyz[2], -xyz[1])
with open(out_filename, 'wb') as f:
pickle.dump(G, f, pickle.HIGHEST_PROTOCOL)
return out_filename
2 changes: 1 addition & 1 deletion nipype/interfaces/diffusion_toolkit/dti.py
@@ -97,7 +97,7 @@ class DTIRecon(CommandLine):
def _create_gradient_matrix(self, bvecs_file, bvals_file):
_gradient_matrix_file = "gradient_matrix.txt"
with open(bvals_file) as fbvals:
-bvals = [val for val in re.split(r"\s+", fbvals.readline().strip())]
+bvals = fbvals.readline().strip().split()
with open(bvecs_file) as fbvecs:
bvecs_x = fbvecs.readline().split()
bvecs_y = fbvecs.readline().split()
12 changes: 6 additions & 6 deletions nipype/interfaces/diffusion_toolkit/odf.py
@@ -98,12 +98,12 @@ class HARDIMat(CommandLine):

def _create_gradient_matrix(self, bvecs_file, bvals_file):
_gradient_matrix_file = "gradient_matrix.txt"
-bvals = [val for val in re.split(r"\s+", open(bvals_file).readline().strip())]
-bvecs_f = open(bvecs_file)
-bvecs_x = [val for val in re.split(r"\s+", bvecs_f.readline().strip())]
-bvecs_y = [val for val in re.split(r"\s+", bvecs_f.readline().strip())]
-bvecs_z = [val for val in re.split(r"\s+", bvecs_f.readline().strip())]
-bvecs_f.close()
+with open(bvals_file) as bvals_f:
+    bvals = bvals_f.readline().strip().split()
+with open(bvecs_file) as bvecs_f:
+    bvecs_x = bvecs_f.readline().strip().split()
+    bvecs_y = bvecs_f.readline().strip().split()
+    bvecs_z = bvecs_f.readline().strip().split()
gradient_matrix_f = open(_gradient_matrix_file, "w")
for i in range(len(bvals)):
if int(bvals[i]) == 0:
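A note on the HARDIMat change above: str.split() with no arguments already splits on runs of arbitrary whitespace and ignores leading and trailing whitespace, so dropping re.split(r"\s+", ...) preserves behaviour. A quick sketch with a made-up line:

import re
line = "  0 1000 1000\t2000  \n"
assert line.strip().split() == re.split(r"\s+", line.strip()) == ["0", "1000", "1000", "2000"]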
4 changes: 2 additions & 2 deletions nipype/interfaces/freesurfer/preprocess.py
@@ -692,11 +692,11 @@ def _get_runs(self):
if self.inputs.seq_list:
if self.inputs.ignore_single_slice:
if (int(s[8]) > 1) and any(
-[s[12].startswith(sn) for sn in self.inputs.seq_list]
+s[12].startswith(sn) for sn in self.inputs.seq_list
):
runs.append(int(s[2]))
else:
-if any([s[12].startswith(sn) for sn in self.inputs.seq_list]):
+if any(s[12].startswith(sn) for sn in self.inputs.seq_list):
runs.append(int(s[2]))
else:
runs.append(int(s[2]))
6 changes: 3 additions & 3 deletions nipype/interfaces/fsl/model.py
@@ -1503,8 +1503,8 @@ def _run_interface(self, runtime):
regs = sorted(self.inputs.regressors.keys())
nwaves = len(regs)
npoints = len(self.inputs.regressors[regs[0]])
-ntcons = sum([1 for con in self.inputs.contrasts if con[1] == "T"])
-nfcons = sum([1 for con in self.inputs.contrasts if con[1] == "F"])
+ntcons = sum(1 for con in self.inputs.contrasts if con[1] == "T")
+nfcons = sum(1 for con in self.inputs.contrasts if con[1] == "F")
# write mat file
mat_txt = ["/NumWaves %d" % nwaves, "/NumPoints %d" % npoints]
ppheights = []
@@ -1591,7 +1591,7 @@ def _run_interface(self, runtime):

def _list_outputs(self):
outputs = self._outputs().get()
-nfcons = sum([1 for con in self.inputs.contrasts if con[1] == "F"])
+nfcons = sum(1 for con in self.inputs.contrasts if con[1] == "F")
for field in list(outputs.keys()):
if ("fts" in field) and (nfcons == 0):
continue
12 changes: 6 additions & 6 deletions nipype/interfaces/io.py
@@ -942,7 +942,7 @@ def _list_outputs(self):
# get list of all files in s3 bucket
conn = boto.connect_s3(anon=self.inputs.anon)
bkt = conn.get_bucket(self.inputs.bucket)
-bkt_files = list(k.key for k in bkt.list(prefix=self.inputs.bucket_path))
+bkt_files = [k.key for k in bkt.list(prefix=self.inputs.bucket_path)]

# keys are outfields, args are template args for the outfield
for key, args in list(self.inputs.template_args.items()):
@@ -1022,7 +1022,7 @@ def _list_outputs(self):
if self.inputs.sort_filelist:
outfiles = human_order_sorted(outfiles)
outputs[key].append(simplify_list(outfiles))
-if any([val is None for val in outputs[key]]):
+if None in outputs[key]:
outputs[key] = []
if len(outputs[key]) == 0:
outputs[key] = None
@@ -1297,7 +1297,7 @@ def _list_outputs(self):
if self.inputs.drop_blank_outputs:
outputs[key] = [x for x in outputs[key] if x is not None]
else:
-if any([val is None for val in outputs[key]]):
+if None in outputs[key]:
outputs[key] = []
if len(outputs[key]) == 0:
outputs[key] = None
@@ -2302,7 +2302,7 @@ def __init__(self, input_names, **inputs):
super().__init__(**inputs)

self._input_names = ensure_list(input_names)
-add_traits(self.inputs, [name for name in self._input_names])
+add_traits(self.inputs, self._input_names)

def _list_outputs(self):
"""Execute this module."""
@@ -2364,7 +2364,7 @@ def __init__(self, input_names, **inputs):
super().__init__(**inputs)

self._input_names = ensure_list(input_names)
-add_traits(self.inputs, [name for name in self._input_names])
+add_traits(self.inputs, self._input_names)

def _list_outputs(self):
"""Execute this module."""
@@ -2642,7 +2642,7 @@ def _list_outputs(self):
outputs[key].append(self._get_files_over_ssh(filledtemplate))

# disclude where there was any invalid matches
-if any([val is None for val in outputs[key]]):
+if None in outputs[key]:
outputs[key] = []

# no outputs is None, not empty list
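A footnote on the DataGrabber-style changes above: a list membership test checks identity before equality, so None in outputs[key] is true whenever any collected value is literally None, matching the old any(val is None for val in outputs[key]) form for the filename lists handled here. A tiny sketch with invented paths:

values = ["/tmp/a.nii", None, "/tmp/b.nii"]
assert (None in values) == any(v is None for v in values)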
4 changes: 2 additions & 2 deletions nipype/interfaces/spm/model.py
@@ -159,7 +159,7 @@ def _parse_inputs(self):
"""validate spm realign options if set to None ignore"""
einputs = super()._parse_inputs(skip=("mask_threshold", "flags"))
if isdefined(self.inputs.flags):
-einputs[0].update({flag: val for (flag, val) in self.inputs.flags.items()})
+einputs[0].update(self.inputs.flags)
for sessinfo in einputs[0]["sess"]:
sessinfo["scans"] = scans_for_fnames(
ensure_list(sessinfo["scans"]), keep4d=False
@@ -309,7 +309,7 @@ def _parse_inputs(self):
"""validate spm realign options if set to None ignore"""
einputs = super()._parse_inputs(skip=("flags"))
if isdefined(self.inputs.flags):
-einputs[0].update({flag: val for (flag, val) in self.inputs.flags.items()})
+einputs[0].update(self.inputs.flags)
return einputs

def _list_outputs(self):
6 changes: 2 additions & 4 deletions nipype/interfaces/utility/wrappers.py
@@ -95,11 +95,9 @@ def __init__(
self.inputs.on_trait_change(self._set_function_string, "function_str")
self._input_names = ensure_list(input_names)
self._output_names = ensure_list(output_names)
-add_traits(self.inputs, [name for name in self._input_names])
+add_traits(self.inputs, self._input_names)
self.imports = imports
-self._out = {}
-for name in self._output_names:
-    self._out[name] = None
+self._out = {name: None for name in self._output_names}

def _set_function_string(self, obj, name, old, new):
if name == "function_str":
4 changes: 2 additions & 2 deletions nipype/pipeline/engine/nodes.py
@@ -240,7 +240,7 @@ def needed_outputs(self):
@needed_outputs.setter
def needed_outputs(self, new_outputs):
"""Needed outputs changes the hash, refresh if changed"""
-new_outputs = sorted(list(set(new_outputs or [])))
+new_outputs = sorted(set(new_outputs or []))
if new_outputs != self._needed_outputs:
# Reset hash
self._hashvalue = None
@@ -1283,7 +1283,7 @@ def _collate_results(self, nodes):
)
setattr(finalresult.outputs, key, values)

-if returncode and any([code is not None for code in returncode]):
+if returncode and any(code is not None for code in returncode):
msg = []
for i, code in enumerate(returncode):
if code is not None:
9 changes: 3 additions & 6 deletions nipype/pipeline/engine/utils.py
@@ -1046,7 +1046,7 @@ def make_field_func(*pair):
logger.debug("node: %s iterables: %s", inode, iterables)

# collect the subnodes to expand
-subnodes = [s for s in dfs_preorder(graph_in, inode)]
+subnodes = list(dfs_preorder(graph_in, inode))
prior_prefix = [re.findall(r"\.(.)I", s._id) for s in subnodes if s._id]
prior_prefix = sorted([l for item in prior_prefix for l in item])
if not prior_prefix:
@@ -1482,11 +1482,8 @@ def clean_working_directory(
files2remove = []
if str2bool(config["execution"]["remove_unnecessary_outputs"]):
for f in walk_files(cwd):
-if f not in needed_files:
-    if not needed_dirs:
-        files2remove.append(f)
-    elif not any([f.startswith(dname) for dname in needed_dirs]):
-        files2remove.append(f)
+if f not in needed_files and not f.startswith(tuple(needed_dirs)):
+    files2remove.append(f)
else:
if not str2bool(config["execution"]["keep_inputs"]):
input_files = {
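The clean_working_directory change above folds the nested conditions into a single test; str.startswith accepts a tuple of prefixes and returns False for an empty tuple, so files are still removed when needed_dirs is empty, as in the old branch. A minimal sketch with invented paths:

needed_dirs = ["/scratch/keep"]
assert "/scratch/keep/mask.nii".startswith(tuple(needed_dirs))
assert not "/scratch/tmp/junk.nii".startswith(tuple(needed_dirs))
assert not "/scratch/tmp/junk.nii".startswith(tuple([]))   # empty tuple never matches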
12 changes: 4 additions & 8 deletions nipype/pipeline/engine/workflows.py
@@ -191,10 +191,8 @@ def connect(self, *args, **kwargs):
and (
".io" in str(destnode._interface.__class__)
or any(
-[
-    ".io" in str(val)
-    for val in destnode._interface.__class__.__bases__
-]
+".io" in str(val)
+for val in destnode._interface.__class__.__bases__
)
)
):
@@ -205,10 +203,8 @@
and (
".io" in str(srcnode._interface.__class__)
or any(
-[
-    ".io" in str(val)
-    for val in srcnode._interface.__class__.__bases__
-]
+".io" in str(val)
+for val in srcnode._interface.__class__.__bases__
)
)
):
2 changes: 1 addition & 1 deletion nipype/pipeline/plugins/base.py
@@ -455,7 +455,7 @@ def _remove_node_deps(self, jobid, crashfile, graph):
dfs_preorder = nx.dfs_preorder
except AttributeError:
dfs_preorder = nx.dfs_preorder_nodes
-subnodes = [s for s in dfs_preorder(graph, self.procs[jobid])]
+subnodes = list(dfs_preorder(graph, self.procs[jobid]))
for node in subnodes:
idx = self.procs.index(node)
self.proc_done[idx] = True
2 changes: 1 addition & 1 deletion nipype/pipeline/plugins/linear.py
@@ -50,7 +50,7 @@ def run(self, graph, config, updatehash=False):
# node might fail
crashfile = report_crash(node)
# remove dependencies from queue
-subnodes = [s for s in dfs_preorder(graph, node)]
+subnodes = list(dfs_preorder(graph, node))
notrun.append(
{"node": node, "dependents": subnodes, "crashfile": crashfile}
)
2 changes: 1 addition & 1 deletion nipype/scripts/instance.py
@@ -39,7 +39,7 @@ def list_interfaces(module):
the given module.
"""
iface_names = []
-for k, v in sorted(list(module.__dict__.items())):
+for k, v in sorted(module.__dict__.items()):
if inspect.isclass(v) and issubclass(v, Interface):
iface_names.append(k)
return iface_names
2 changes: 1 addition & 1 deletion nipype/utils/docparse.py
@@ -283,7 +283,7 @@ def _parse_doc(doc, style=["--"]):
flag = [
item
for i, item in enumerate(linelist)
-if i < 2 and any([item.startswith(s) for s in style]) and len(item) > 1
+if i < 2 and item.startswith(tuple(style)) and len(item) > 1
]
if flag:
if len(flag) == 1:
2 changes: 1 addition & 1 deletion nipype/utils/filemanip.py
@@ -499,7 +499,7 @@ def ensure_list(filename):
elif isinstance(filename, list):
return filename
elif is_container(filename):
-return [x for x in filename]
+return list(filename)
else:
return None

2 changes: 1 addition & 1 deletion nipype/utils/misc.py
@@ -53,7 +53,7 @@ def trim(docstring, marker=None):
if (
marker is not None
and stripped
-and all([s == stripped[0] for s in stripped])
+and all(s == stripped[0] for s in stripped)
and stripped[0] not in [":"]
):
line = line.replace(stripped[0], marker)
2 changes: 1 addition & 1 deletion nipype/utils/nipype_cmd.py
@@ -13,7 +13,7 @@ def listClasses(module=None):
__import__(module)
pkg = sys.modules[module]
print("Available Interfaces:")
-for k, v in sorted(list(pkg.__dict__.items())):
+for k, v in sorted(pkg.__dict__.items()):
if inspect.isclass(v) and issubclass(v, Interface):
print("\t%s" % k)

