Commit 6a8318f

Merge pull request #2196 from satra/fix/timeout
fix for networkx and afni
2 parents: 4a0bd3d + 9ced114

24 files changed: +116 additions, -120 deletions

doc/users/install.rst

Lines changed: 1 addition & 1 deletion
@@ -47,7 +47,7 @@ use the following command::
 While `all` installs everything, one can also install select components as
 listed below::

-  'doc': ['Sphinx>=1.4', 'matplotlib', 'pydotplus'],
+  'doc': ['Sphinx>=1.4', 'matplotlib', 'pydotplus', 'pydot>=1.2.3'],
   'tests': ['pytest-cov', 'codecov'],
   'nipy': ['nitime', 'nilearn', 'dipy', 'nipy', 'matplotlib'],
   'profiler': ['psutil'],

examples/dmri_dtk_dti.py

Lines changed: 0 additions & 1 deletion
@@ -37,7 +37,6 @@

 package_check('numpy', '1.3', 'tutorial1')
 package_check('scipy', '0.7', 'tutorial1')
-package_check('networkx', '1.0', 'tutorial1')
 package_check('IPython', '0.10', 'tutorial1')


examples/dmri_dtk_odf.py

Lines changed: 0 additions & 1 deletion
@@ -37,7 +37,6 @@

 package_check('numpy', '1.3', 'tutorial1')
 package_check('scipy', '0.7', 'tutorial1')
-package_check('networkx', '1.0', 'tutorial1')
 package_check('IPython', '0.10', 'tutorial1')


examples/dmri_fsl_dti.py

Lines changed: 0 additions & 1 deletion
@@ -37,7 +37,6 @@

 package_check('numpy', '1.3', 'tutorial1')
 package_check('scipy', '0.7', 'tutorial1')
-package_check('networkx', '1.0', 'tutorial1')
 package_check('IPython', '0.10', 'tutorial1')


examples/fmri_slicer_coregistration.py

Lines changed: 0 additions & 1 deletion
@@ -37,7 +37,6 @@

 package_check('numpy', '1.3', 'tutorial1')
 package_check('scipy', '0.7', 'tutorial1')
-package_check('networkx', '1.0', 'tutorial1')
 package_check('IPython', '0.10', 'tutorial1')

 """The nipype tutorial contains data for two subjects. Subject data

nipype/info.py

Lines changed: 3 additions & 1 deletion
@@ -107,6 +107,7 @@ def get_nipype_gitversion():
 SIMPLEJSON_MIN_VERSION = '3.8.0'
 PROV_VERSION = '1.5.0'
 CLICK_MIN_VERSION = '6.6.0'
+PYDOT_MIN_VERSION = '1.2.3'

 NAME = 'nipype'
 MAINTAINER = 'nipype developers'
@@ -142,6 +143,7 @@ def get_nipype_gitversion():
     'pytest>=%s' % PYTEST_MIN_VERSION,
     'mock',
     'pydotplus',
+    'pydot>=%s' % PYDOT_MIN_VERSION,
     'packaging',
 ]

@@ -154,7 +156,7 @@ def get_nipype_gitversion():
 ]

 EXTRA_REQUIRES = {
-    'doc': ['Sphinx>=1.4', 'matplotlib', 'pydotplus'],
+    'doc': ['Sphinx>=1.4', 'matplotlib', 'pydotplus', 'pydot>=1.2.3'],
     'tests': TESTS_REQUIRES,
     'nipy': ['nitime', 'nilearn', 'dipy', 'nipy', 'matplotlib'],
     'profiler': ['psutil'],
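
The new pydot floor sits alongside the networkx-related changes below; 1.2.3 appears to be the minimum release that cooperates with current networkx versions. As an illustration only (this check is not part of the commit), the pin can be verified in an existing environment as follows; `PYDOT_MIN_VERSION` mirrors the constant added above:

    # Illustrative check only -- not part of the commit.
    from distutils.version import LooseVersion

    import pydot

    PYDOT_MIN_VERSION = '1.2.3'  # the constant added to nipype/info.py
    assert LooseVersion(pydot.__version__) >= LooseVersion(PYDOT_MIN_VERSION), \
        'pydot %s is older than the required %s' % (pydot.__version__, PYDOT_MIN_VERSION)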

nipype/interfaces/afni/base.py

Lines changed: 16 additions & 23 deletions
@@ -10,7 +10,7 @@
 from sys import platform
 from distutils import spawn

-from ... import logging
+from ... import logging, LooseVersion
 from ...utils.filemanip import split_filename, fname_presuffix

 from ..base import (
@@ -44,32 +44,25 @@ def version():

         """
         try:
-            clout = CommandLine(command='afni_vcheck',
+            clout = CommandLine(command='afni --version',
                                 terminal_output='allatonce').run()
-
-            # Try to parse the version number
-            currv = clout.runtime.stdout.split('\n')[1].split('=', 1)[1].strip()
         except IOError:
             # If afni_vcheck is not present, return None
-            IFLOGGER.warn('afni_vcheck executable not found.')
+            IFLOGGER.warn('afni executable not found.')
             return None
-        except RuntimeError as e:
-            # If AFNI is outdated, afni_vcheck throws error.
-            # Show new version, but parse current anyways.
-            currv = str(e).split('\n')[4].split('=', 1)[1].strip()
-            nextv = str(e).split('\n')[6].split('=', 1)[1].strip()
-            IFLOGGER.warn(
-                'AFNI is outdated, detected version %s and %s is available.' % (currv, nextv))
-
-        if currv.startswith('AFNI_'):
-            currv = currv[5:]
-
-        v = currv.split('.')
-        try:
-            v = [int(n) for n in v]
-        except ValueError:
-            return currv
-        return tuple(v)
+
+        version_stamp = clout.runtime.stdout.split('\n')[0].split('Version ')[1]
+        if version_stamp.startswith('AFNI'):
+            version_stamp = version_stamp.split('AFNI_')[1]
+        elif version_stamp.startswith('Debian'):
+            version_stamp = version_stamp.split('Debian-')[1].split('~')[0]
+        else:
+            return None
+
+        version = LooseVersion(version_stamp.replace('_', '.')).version[:3]
+        if version[0] < 1000:
+            version[0] = version[0] + 2000
+        return tuple(version)

     @classmethod
     def output_type_to_ext(cls, outputtype):
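
For reference, the reworked version parsing can be exercised in isolation. The sketch below restates the logic of the new `Info.version()` as a standalone function; the function name and the two sample `afni --version` first lines are hypothetical, and `LooseVersion` is imported from `distutils` here rather than re-exported from the nipype package as in the diff:

    # Standalone restatement of the new parsing logic, for illustration only.
    from distutils.version import LooseVersion


    def parse_afni_version(first_line):
        version_stamp = first_line.split('Version ')[1]
        if version_stamp.startswith('AFNI'):
            version_stamp = version_stamp.split('AFNI_')[1]
        elif version_stamp.startswith('Debian'):
            version_stamp = version_stamp.split('Debian-')[1].split('~')[0]
        else:
            return None
        version = LooseVersion(version_stamp.replace('_', '.')).version[:3]
        if version[0] < 1000:  # two-digit AFNI years become e.g. 2017
            version[0] = version[0] + 2000
        return tuple(version)


    # Hypothetical banner lines, shaped like `afni --version` output:
    print(parse_afni_version('Precompiled binary linux_openmp_64 (Version AFNI_17.2.02)'))
    # -> (2017, 2, 2)
    print(parse_afni_version('Precompiled binary (Version Debian-16.2.07~dfsg.1-3~nd14.04+1)'))
    # -> (2016, 2, 7)

The year-based first element of this tuple is what the updated checks in preprocess.py below compare against.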

nipype/interfaces/afni/preprocess.py

Lines changed: 2 additions & 2 deletions
@@ -1457,7 +1457,7 @@ def __init__(self, **inputs):
         version = Info.version()

         # As of AFNI 16.0.00, redirect_x is not needed
-        if isinstance(version[0], int) and version[0] > 15:
+        if version[0] > 2015:
             self._redirect_x = False

     def _parse_inputs(self, skip=None):
@@ -2150,7 +2150,7 @@ def __init__(self, **inputs):
         v = Info.version()

         # As of AFNI 16.0.00, redirect_x is not needed
-        if isinstance(v[0], int) and v[0] > 15:
+        if v[0] > 2015:
             self._redirect_x = False

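
Since `Info.version()` now yields a year-based integer tuple, the `redirect_x` guard compares the first element against 2015 rather than 15. A minimal illustration with a hypothetical value:

    # Hypothetical value: what the reworked Info.version() would return for AFNI 16.0.00.
    version = (2016, 0, 0)
    if version[0] > 2015:    # previously: isinstance(version[0], int) and version[0] > 15
        redirect_x = False   # X redirection is not needed on AFNI 16 and later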

nipype/interfaces/cmtk/cmtk.py

Lines changed: 9 additions & 9 deletions
@@ -214,16 +214,16 @@ def cmat(track_file, roi_file, resolution_network_file, matrix_name, matrix_mat_
     nROIs = len(gp.nodes())

     # add node information from parcellation
-    if 'dn_position' in gp.node[gp.nodes()[0]]:
+    if 'dn_position' in gp.nodes[list(gp.nodes())[0]]:
         G = gp.copy()
     else:
         G = nx.Graph()
-        for u, d in gp.nodes_iter(data=True):
-            G.add_node(int(u), d)
+        for u, d in gp.nodes(data=True):
+            G.add_node(int(u), **d)
             # compute a position for the node based on the mean position of the
             # ROI in voxel coordinates (segmentation volume )
             xyz = tuple(np.mean(np.where(np.flipud(roiData) == int(d["dn_correspondence_id"])), axis=1))
-            G.node[int(u)]['dn_position'] = tuple([xyz[0], xyz[2], -xyz[1]])
+            G.nodes[int(u)]['dn_position'] = tuple([xyz[0], xyz[2], -xyz[1]])

     if intersections:
         iflogger.info("Filtering tractography from intersections")
@@ -304,7 +304,7 @@ def cmat(track_file, roi_file, resolution_network_file, matrix_name, matrix_mat_
     fibmean = numfib.copy()
     fibmedian = numfib.copy()
     fibdev = numfib.copy()
-    for u, v, d in G.edges_iter(data=True):
+    for u, v, d in G.edges(data=True):
         G.remove_edge(u, v)
         di = {}
         if 'fiblist' in d:
@@ -319,7 +319,7 @@ def cmat(track_file, roi_file, resolution_network_file, matrix_name, matrix_mat_
             di['fiber_length_median'] = 0
             di['fiber_length_std'] = 0
         if not u == v: # Fix for self loop problem
-            G.add_edge(u, v, di)
+            G.add_edge(u, v, **di)
             if 'fiblist' in d:
                 numfib.add_edge(u, v, weight=di['number_of_fibers'])
                 fibmean.add_edge(u, v, weight=di['fiber_length_mean'])
@@ -747,10 +747,10 @@ def create_nodes(roi_file, resolution_network_file, out_filename):
     roi_image = nb.load(roi_file, mmap=NUMPY_MMAP)
     roiData = roi_image.get_data()
     nROIs = len(gp.nodes())
-    for u, d in gp.nodes_iter(data=True):
-        G.add_node(int(u), d)
+    for u, d in gp.nodes(data=True):
+        G.add_node(int(u), **d)
         xyz = tuple(np.mean(np.where(np.flipud(roiData) == int(d["dn_correspondence_id"])), axis=1))
-        G.node[int(u)]['dn_position'] = tuple([xyz[0], xyz[2], -xyz[1]])
+        G.nodes[int(u)]['dn_position'] = tuple([xyz[0], xyz[2], -xyz[1]])
     nx.write_gpickle(G, out_filename)
     return out_filename
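
The cmtk changes in this commit all follow the same networkx 1.x-to-2.x migration pattern. A minimal standalone sketch of the 2.x idioms involved, assuming networkx >= 2.0 and using made-up node attributes:

    # networkx 2.x idioms used in the hunks above; attribute values are made up.
    import networkx as nx

    G = nx.Graph()

    # Attributes are keyword arguments in 2.x, so an existing dict must be
    # unpacked with ** (G.add_node(1, attrs) raises a TypeError).
    attrs = {'dn_position': (1.0, 2.0, 3.0), 'dn_fsname': 'precentral'}
    G.add_node(1, **attrs)
    G.add_edge(1, 2, weight=0.5)

    # Attribute access goes through G.nodes[n] (G.node[n] in 1.x), and the
    # *_iter methods are gone: nodes()/edges() return iterable views.
    G.nodes[1]['dn_position'] = (1.0, 3.0, -2.0)
    for u, d in G.nodes(data=True):       # replaces G.nodes_iter(data=True)
        print(u, d)
    for u, v, d in G.edges(data=True):    # replaces G.edges_iter(data=True)
        print(u, v, d)

    # Views are not lists, so indexing and comparisons need an explicit list(),
    # as in the test_engine.py changes further down.
    assert list(nx.Graph().edges()) == []
    first_node = list(G.nodes())[0]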

nipype/interfaces/cmtk/nbs.py

Lines changed: 3 additions & 3 deletions
@@ -113,9 +113,9 @@ def _run_interface(self, runtime):
         node_network = nx.read_gpickle(node_ntwk_name)
         iflogger.info('Populating node dictionaries with attributes from {node}'.format(node=node_ntwk_name))

-        for nid, ndata in node_network.nodes_iter(data=True):
-            nbsgraph.node[nid] = ndata
-            nbs_pval_graph.node[nid] = ndata
+        for nid, ndata in node_network.nodes(data=True):
+            nbsgraph.nodes[nid] = ndata
+            nbs_pval_graph.nodes[nid] = ndata

         path = op.abspath('NBS_Result_' + details)
         iflogger.info(path)

nipype/interfaces/cmtk/nx.py

Lines changed: 27 additions & 27 deletions
@@ -48,7 +48,7 @@ def read_unknown_ntwk(ntwk):

 def remove_all_edges(ntwk):
     ntwktmp = ntwk.copy()
-    edges = ntwktmp.edges_iter()
+    edges = list(ntwktmp.edges())
     for edge in edges:
         ntwk.remove_edge(edge[0], edge[1])
     return ntwk
@@ -60,20 +60,20 @@ def fix_keys_for_gexf(orig):
     """
     import networkx as nx
     ntwk = nx.Graph()
-    nodes = orig.nodes_iter()
-    edges = orig.edges_iter()
+    nodes = list(orig.nodes())
+    edges = list(orig.edges())
     for node in nodes:
         newnodedata = {}
-        newnodedata.update(orig.node[node])
-        if 'dn_fsname' in orig.node[node]:
-            newnodedata['label'] = orig.node[node]['dn_fsname']
-        ntwk.add_node(str(node), newnodedata)
-        if 'dn_position' in ntwk.node[str(node)] and 'dn_position' in newnodedata:
-            ntwk.node[str(node)]['dn_position'] = str(newnodedata['dn_position'])
+        newnodedata.update(orig.nodes[node])
+        if 'dn_fsname' in orig.nodes[node]:
+            newnodedata['label'] = orig.nodes[node]['dn_fsname']
+        ntwk.add_node(str(node), **newnodedata)
+        if 'dn_position' in ntwk.nodes[str(node)] and 'dn_position' in newnodedata:
+            ntwk.nodes[str(node)]['dn_position'] = str(newnodedata['dn_position'])
     for edge in edges:
         data = {}
         data = orig.edge[edge[0]][edge[1]]
-        ntwk.add_edge(str(edge[0]), str(edge[1]), data)
+        ntwk.add_edge(str(edge[0]), str(edge[1]), **data)
         if 'fiber_length_mean' in ntwk.edge[str(edge[0])][str(edge[1])]:
             ntwk.edge[str(edge[0])][str(edge[1])]['fiber_length_mean'] = str(data['fiber_length_mean'])
         if 'fiber_length_std' in ntwk.edge[str(edge[0])][str(edge[1])]:
@@ -125,7 +125,7 @@ def average_networks(in_files, ntwk_res_file, group_id):
         tmp = nx.read_gpickle(subject)
         iflogger.info(('File {s} has {n} '
                        'edges').format(s=subject, n=tmp.number_of_edges()))
-        edges = tmp.edges_iter()
+        edges = list(tmp.edges())
         for edge in edges:
             data = {}
             data = tmp.edge[edge[0]][edge[1]]
@@ -134,29 +134,29 @@ def average_networks(in_files, ntwk_res_file, group_id):
                 current = {}
                 current = ntwk.edge[edge[0]][edge[1]]
                 data = add_dicts_by_key(current, data)
-            ntwk.add_edge(edge[0], edge[1], data)
-        nodes = tmp.nodes_iter()
+            ntwk.add_edge(edge[0], edge[1], **data)
+        nodes = list(nodes())
         for node in nodes:
             data = {}
-            data = ntwk.node[node]
-            if 'value' in tmp.node[node]:
-                data['value'] = data['value'] + tmp.node[node]['value']
-            ntwk.add_node(node, data)
+            data = ntwk.nodes[node]
+            if 'value' in tmp.nodes[node]:
+                data['value'] = data['value'] + tmp.nodes[node]['value']
+            ntwk.add_node(node, **data)

     # Divides each value by the number of files
-    nodes = ntwk.nodes_iter()
-    edges = ntwk.edges_iter()
+    nodes = list(ntwk.nodes())
+    edges = list(ntwk.edges())
     iflogger.info(('Total network has {n} '
                    'edges').format(n=ntwk.number_of_edges()))
     avg_ntwk = nx.Graph()
     newdata = {}
     for node in nodes:
-        data = ntwk.node[node]
+        data = ntwk.nodes[node]
         newdata = data
         if 'value' in data:
             newdata['value'] = data['value'] / len(in_files)
-        ntwk.node[node]['value'] = newdata
-        avg_ntwk.add_node(node, newdata)
+        ntwk.nodes[node]['value'] = newdata
+        avg_ntwk.add_node(node, **newdata)

     edge_dict = {}
     edge_dict['count'] = np.zeros((avg_ntwk.number_of_nodes(),
@@ -168,12 +168,12 @@ def average_networks(in_files, ntwk_res_file, group_id):
                 if not key == 'count':
                     data[key] = data[key] / len(in_files)
             ntwk.edge[edge[0]][edge[1]] = data
-            avg_ntwk.add_edge(edge[0], edge[1], data)
+            avg_ntwk.add_edge(edge[0], edge[1], **data)
         edge_dict['count'][edge[0] - 1][edge[1] - 1] = ntwk.edge[edge[0]][edge[1]]['count']

     iflogger.info('After thresholding, the average network has has {n} edges'.format(n=avg_ntwk.number_of_edges()))

-    avg_edges = avg_ntwk.edges_iter()
+    avg_edges = avg_ntwk.edges()
     for edge in avg_edges:
         data = avg_ntwk.edge[edge[0]][edge[1]]
         for key in list(data.keys()):
@@ -319,11 +319,11 @@ def compute_network_measures(ntwk):
 def add_node_data(node_array, ntwk):
     node_ntwk = nx.Graph()
     newdata = {}
-    for idx, data in ntwk.nodes_iter(data=True):
+    for idx, data in ntwk.nodes(data=True):
         if not int(idx) == 0:
             newdata['value'] = node_array[int(idx) - 1]
             data.update(newdata)
-            node_ntwk.add_node(int(idx), data)
+            node_ntwk.add_node(int(idx), **data)
     return node_ntwk


@@ -339,7 +339,7 @@ def add_edge_data(edge_array, ntwk, above=0, below=0):
                         old_edge_dict = edge_ntwk.edge[x + 1][y + 1]
                         edge_ntwk.remove_edge(x + 1, y + 1)
                         data.update(old_edge_dict)
-                    edge_ntwk.add_edge(x + 1, y + 1, data)
+                    edge_ntwk.add_edge(x + 1, y + 1, **data)
     return edge_ntwk


nipype/interfaces/cmtk/parcellation.py

Lines changed: 2 additions & 2 deletions
@@ -213,7 +213,7 @@ def create_roi(subject_id, subjects_dir, fs_dir, parcellation_name, dilation):
     rois = np.zeros((256, 256, 256), dtype=np.int16)

     count = 0
-    for brk, brv in pg.nodes_iter(data=True):
+    for brk, brv in pg.nodes(data=True):
         count = count + 1
         iflogger.info(brv)
         iflogger.info(brk)
@@ -429,7 +429,7 @@ def create_wm_mask(subject_id, subjects_dir, fs_dir, parcellation_name):
     roid = roi.get_data()
     assert roid.shape[0] == wmmask.shape[0]
     pg = nx.read_graphml(pgpath)
-    for brk, brv in pg.nodes_iter(data=True):
+    for brk, brv in pg.nodes(data=True):
         if brv['dn_region'] == 'cortical':
             iflogger.info("Subtracting region %s with intensity value %s" %
                           (brv['dn_region'], brv['dn_correspondence_id']))

nipype/pipeline/engine/tests/test_engine.py

Lines changed: 2 additions & 2 deletions
@@ -316,7 +316,7 @@ def test_disconnect():
     flow1 = pe.Workflow(name='test')
     flow1.connect(a, 'a', b, 'a')
     flow1.disconnect(a, 'a', b, 'a')
-    assert flow1._graph.edges() == []
+    assert list(flow1._graph.edges()) == []


 def test_doubleconnect():
@@ -637,7 +637,7 @@ def func1(in1):
     n1.inputs.in1 = [1]
     eg = w1.run()

-    node = eg.nodes()[0]
+    node = list(eg.nodes())[0]
     outjson = glob(os.path.join(node.output_dir(), '_0x*.json'))
     assert len(outjson) == 1
