2 changes: 1 addition & 1 deletion doc/users/install.rst
@@ -47,7 +47,7 @@ use the following command::
While `all` installs everything, one can also install select components as
listed below::

- 'doc': ['Sphinx>=1.4', 'matplotlib', 'pydotplus'],
+ 'doc': ['Sphinx>=1.4', 'matplotlib', 'pydotplus', 'pydot>=1.2.3'],
'tests': ['pytest-cov', 'codecov'],
'nipy': ['nitime', 'nilearn', 'dipy', 'nipy', 'matplotlib'],
'profiler': ['psutil'],
4 changes: 3 additions & 1 deletion nipype/info.py
@@ -107,6 +107,7 @@ def get_nipype_gitversion():
SIMPLEJSON_MIN_VERSION = '3.8.0'
PROV_VERSION = '1.5.0'
CLICK_MIN_VERSION = '6.6.0'
+ PYDOT_MIN_VERSION = '1.2.3'

NAME = 'nipype'
MAINTAINER = 'nipype developers'
@@ -142,6 +143,7 @@ def get_nipype_gitversion():
'pytest>=%s' % PYTEST_MIN_VERSION,
'mock',
'pydotplus',
+ 'pydot>=%s' % PYDOT_MIN_VERSION,
'packaging',
]

@@ -154,7 +156,7 @@ def get_nipype_gitversion():
]

EXTRA_REQUIRES = {
- 'doc': ['Sphinx>=1.4', 'matplotlib', 'pydotplus'],
+ 'doc': ['Sphinx>=1.4', 'matplotlib', 'pydotplus', 'pydot>=1.2.3'],
'tests': TESTS_REQUIRES,
'nipy': ['nitime', 'nilearn', 'dipy', 'nipy', 'matplotlib'],
'profiler': ['psutil'],
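For anyone consuming the new pin: PYDOT_MIN_VERSION feeds both REQUIRES and the 'doc' extra above. Below is a minimal sketch (not part of this diff; the helper is hypothetical) of a runtime guard for the pin, reusing the packaging dependency already listed in REQUIRES:

# Minimal sketch of a runtime guard for the new pydot pin. check_pydot() is
# hypothetical and not part of nipype; PYDOT_MIN_VERSION mirrors info.py.
from packaging.version import Version

import pydot

PYDOT_MIN_VERSION = '1.2.3'

def check_pydot():
    """Raise if the installed pydot predates the pinned minimum."""
    if Version(pydot.__version__) < Version(PYDOT_MIN_VERSION):
        raise ImportError('pydot %s found, but nipype requires pydot>=%s'
                          % (pydot.__version__, PYDOT_MIN_VERSION))

check_pydot()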
18 changes: 9 additions & 9 deletions nipype/interfaces/cmtk/cmtk.py
@@ -214,16 +214,16 @@ def cmat(track_file, roi_file, resolution_network_file, matrix_name, matrix_mat_
nROIs = len(gp.nodes())

# add node information from parcellation
- if 'dn_position' in gp.node[gp.nodes()[0]]:
+ if 'dn_position' in gp.nodes[list(gp.nodes())[0]]:
G = gp.copy()
else:
G = nx.Graph()
- for u, d in gp.nodes_iter(data=True):
- G.add_node(int(u), d)
+ for u, d in gp.nodes(data=True):
+ G.add_node(int(u), **d)
# compute a position for the node based on the mean position of the
# ROI in voxel coordinates (segmentation volume )
xyz = tuple(np.mean(np.where(np.flipud(roiData) == int(d["dn_correspondence_id"])), axis=1))
- G.node[int(u)]['dn_position'] = tuple([xyz[0], xyz[2], -xyz[1]])
+ G.nodes[int(u)]['dn_position'] = tuple([xyz[0], xyz[2], -xyz[1]])

if intersections:
iflogger.info("Filtering tractography from intersections")
@@ -304,7 +304,7 @@ def cmat(track_file, roi_file, resolution_network_file, matrix_name, matrix_mat_
fibmean = numfib.copy()
fibmedian = numfib.copy()
fibdev = numfib.copy()
- for u, v, d in G.edges_iter(data=True):
+ for u, v, d in list(G.edges(data=True)):  # list(): edges are removed inside the loop
G.remove_edge(u, v)
di = {}
if 'fiblist' in d:
@@ -319,7 +319,7 @@ def cmat(track_file, roi_file, resolution_network_file, matrix_name, matrix_mat_
di['fiber_length_median'] = 0
di['fiber_length_std'] = 0
if not u == v: # Fix for self loop problem
- G.add_edge(u, v, di)
+ G.add_edge(u, v, **di)
if 'fiblist' in d:
numfib.add_edge(u, v, weight=di['number_of_fibers'])
fibmean.add_edge(u, v, weight=di['fiber_length_mean'])
@@ -747,10 +747,10 @@ def create_nodes(roi_file, resolution_network_file, out_filename):
roi_image = nb.load(roi_file, mmap=NUMPY_MMAP)
roiData = roi_image.get_data()
nROIs = len(gp.nodes())
- for u, d in gp.nodes_iter(data=True):
- G.add_node(int(u), d)
+ for u, d in gp.nodes(data=True):
+ G.add_node(int(u), **d)
xyz = tuple(np.mean(np.where(np.flipud(roiData) == int(d["dn_correspondence_id"])), axis=1))
- G.node[int(u)]['dn_position'] = tuple([xyz[0], xyz[2], -xyz[1]])
+ G.nodes[int(u)]['dn_position'] = tuple([xyz[0], xyz[2], -xyz[1]])
nx.write_gpickle(G, out_filename)
return out_filename

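All nine changes in cmtk.py follow the same networkx 1.x to 2.x migration: nodes_iter(data=True) becomes nodes(data=True), attribute dicts move from a positional argument to keyword arguments in add_node/add_edge, and G.node[n] becomes the G.nodes[n] view. A self-contained sketch of the pattern on a toy graph (not taken from the PR):

# Toy illustration of the 2.x node API used above; requires networkx >= 2.0.
import networkx as nx

gp = nx.Graph()
gp.add_node('1', dn_correspondence_id='1')  # stand-in for a parcellation node

G = nx.Graph()
for u, d in gp.nodes(data=True):   # 2.x spelling of gp.nodes_iter(data=True)
    G.add_node(int(u), **d)        # attributes are keyword arguments in 2.x
G.nodes[1]['dn_position'] = (0.0, 0.0, 0.0)  # G.nodes[n] replaces G.node[n]

assert G.nodes[1]['dn_correspondence_id'] == '1'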
6 changes: 3 additions & 3 deletions nipype/interfaces/cmtk/nbs.py
@@ -113,9 +113,9 @@ def _run_interface(self, runtime):
node_network = nx.read_gpickle(node_ntwk_name)
iflogger.info('Populating node dictionaries with attributes from {node}'.format(node=node_ntwk_name))

- for nid, ndata in node_network.nodes_iter(data=True):
- nbsgraph.node[nid] = ndata
- nbs_pval_graph.node[nid] = ndata
+ for nid, ndata in node_network.nodes(data=True):
+ nbsgraph.nodes[nid].update(ndata)
+ nbs_pval_graph.nodes[nid].update(ndata)

path = op.abspath('NBS_Result_' + details)
iflogger.info(path)
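The nbs.py hunk is more than a rename. In networkx 2.x, G.nodes is a NodeView: indexing it returns the mutable per-node attribute dict, but assigning through it raises TypeError, so the dict has to be updated in place. A sketch of the distinction, using a hypothetical attribute (not from the PR):

# Why .update() is needed: NodeView supports item lookup, not item assignment.
import networkx as nx

G = nx.Graph()
G.add_node(1)

try:
    G.nodes[1] = {'p_value': 0.01}         # raises TypeError in networkx 2.x
except TypeError:
    G.nodes[1].update({'p_value': 0.01})   # mutate the attribute dict instead

assert G.nodes[1]['p_value'] == 0.01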
54 changes: 27 additions & 27 deletions nipype/interfaces/cmtk/nx.py
@@ -48,7 +48,7 @@ def read_unknown_ntwk(ntwk):

def remove_all_edges(ntwk):
ntwktmp = ntwk.copy()
- edges = ntwktmp.edges_iter()
+ edges = list(ntwktmp.edges())
for edge in edges:
ntwk.remove_edge(edge[0], edge[1])
return ntwk
@@ -60,20 +60,20 @@ def fix_keys_for_gexf(orig):
"""
import networkx as nx
ntwk = nx.Graph()
- nodes = orig.nodes_iter()
- edges = orig.edges_iter()
+ nodes = list(orig.nodes())
+ edges = list(orig.edges())
for node in nodes:
newnodedata = {}
- newnodedata.update(orig.node[node])
- if 'dn_fsname' in orig.node[node]:
- newnodedata['label'] = orig.node[node]['dn_fsname']
- ntwk.add_node(str(node), newnodedata)
- if 'dn_position' in ntwk.node[str(node)] and 'dn_position' in newnodedata:
- ntwk.node[str(node)]['dn_position'] = str(newnodedata['dn_position'])
+ newnodedata.update(orig.nodes[node])
+ if 'dn_fsname' in orig.nodes[node]:
+ newnodedata['label'] = orig.nodes[node]['dn_fsname']
+ ntwk.add_node(str(node), **newnodedata)
+ if 'dn_position' in ntwk.nodes[str(node)] and 'dn_position' in newnodedata:
+ ntwk.nodes[str(node)]['dn_position'] = str(newnodedata['dn_position'])
for edge in edges:
data = {}
data = orig.edge[edge[0]][edge[1]]
- ntwk.add_edge(str(edge[0]), str(edge[1]), data)
+ ntwk.add_edge(str(edge[0]), str(edge[1]), **data)
if 'fiber_length_mean' in ntwk.edge[str(edge[0])][str(edge[1])]:
ntwk.edge[str(edge[0])][str(edge[1])]['fiber_length_mean'] = str(data['fiber_length_mean'])
if 'fiber_length_std' in ntwk.edge[str(edge[0])][str(edge[1])]:
@@ -125,7 +125,7 @@ def average_networks(in_files, ntwk_res_file, group_id):
tmp = nx.read_gpickle(subject)
iflogger.info(('File {s} has {n} '
'edges').format(s=subject, n=tmp.number_of_edges()))
- edges = tmp.edges_iter()
+ edges = list(tmp.edges())
for edge in edges:
data = {}
data = tmp.edge[edge[0]][edge[1]]
@@ -134,29 +134,29 @@ def average_networks(in_files, ntwk_res_file, group_id):
current = {}
current = ntwk.edge[edge[0]][edge[1]]
data = add_dicts_by_key(current, data)
- ntwk.add_edge(edge[0], edge[1], data)
- nodes = tmp.nodes_iter()
+ ntwk.add_edge(edge[0], edge[1], **data)
+ nodes = list(tmp.nodes())
for node in nodes:
data = {}
- data = ntwk.node[node]
- if 'value' in tmp.node[node]:
- data['value'] = data['value'] + tmp.node[node]['value']
- ntwk.add_node(node, data)
+ data = ntwk.nodes[node]
+ if 'value' in tmp.nodes[node]:
+ data['value'] = data['value'] + tmp.nodes[node]['value']
+ ntwk.add_node(node, **data)

# Divides each value by the number of files
- nodes = ntwk.nodes_iter()
- edges = ntwk.edges_iter()
+ nodes = list(ntwk.nodes())
+ edges = list(ntwk.edges())
iflogger.info(('Total network has {n} '
'edges').format(n=ntwk.number_of_edges()))
avg_ntwk = nx.Graph()
newdata = {}
for node in nodes:
- data = ntwk.node[node]
+ data = ntwk.nodes[node]
newdata = data
if 'value' in data:
newdata['value'] = data['value'] / len(in_files)
- ntwk.node[node]['value'] = newdata
- avg_ntwk.add_node(node, newdata)
+ ntwk.nodes[node]['value'] = newdata
+ avg_ntwk.add_node(node, **newdata)

edge_dict = {}
edge_dict['count'] = np.zeros((avg_ntwk.number_of_nodes(),
@@ -168,12 +168,12 @@ def average_networks(in_files, ntwk_res_file, group_id):
if not key == 'count':
data[key] = data[key] / len(in_files)
ntwk.edge[edge[0]][edge[1]] = data
- avg_ntwk.add_edge(edge[0], edge[1], data)
+ avg_ntwk.add_edge(edge[0], edge[1], **data)
edge_dict['count'][edge[0] - 1][edge[1] - 1] = ntwk.edge[edge[0]][edge[1]]['count']

iflogger.info('After thresholding, the average network has {n} edges'.format(n=avg_ntwk.number_of_edges()))

- avg_edges = avg_ntwk.edges_iter()
+ avg_edges = avg_ntwk.edges()
for edge in avg_edges:
data = avg_ntwk.edge[edge[0]][edge[1]]
for key in list(data.keys()):
@@ -319,11 +319,11 @@ def compute_network_measures(ntwk):
def add_node_data(node_array, ntwk):
node_ntwk = nx.Graph()
newdata = {}
- for idx, data in ntwk.nodes_iter(data=True):
+ for idx, data in ntwk.nodes(data=True):
if not int(idx) == 0:
newdata['value'] = node_array[int(idx) - 1]
data.update(newdata)
- node_ntwk.add_node(int(idx), data)
+ node_ntwk.add_node(int(idx), **data)
return node_ntwk


@@ -339,7 +339,7 @@ def add_edge_data(edge_array, ntwk, above=0, below=0):
old_edge_dict = edge_ntwk.edge[x + 1][y + 1]
edge_ntwk.remove_edge(x + 1, y + 1)
data.update(old_edge_dict)
- edge_ntwk.add_edge(x + 1, y + 1, data)
+ edge_ntwk.add_edge(x + 1, y + 1, **data)
return edge_ntwk


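Note that several unchanged context lines above (for example ntwk.edge[edge[0]][edge[1]]) still use the 1.x edge accessor, which networkx 2.x removed; presumably they are handled in parts of the files this view does not expand. For reference, a sketch of the 2.x equivalents (not part of this diff):

# 2.x spellings of the 1.x G.edge[u][v] accessor seen in the context lines.
import networkx as nx

G = nx.Graph()
G.add_edge(1, 2, fiber_length_mean=42.0)

data = G.edges[1, 2]                 # 2.x replacement for G.edge[1][2]
assert G.adj[1][2] is data           # adjacency spelling returns the same dict
data['fiber_length_mean'] = str(data['fiber_length_mean'])  # in-place edit
assert G.edges[1, 2]['fiber_length_mean'] == '42.0'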
4 changes: 2 additions & 2 deletions nipype/interfaces/cmtk/parcellation.py
@@ -213,7 +213,7 @@ def create_roi(subject_id, subjects_dir, fs_dir, parcellation_name, dilation):
rois = np.zeros((256, 256, 256), dtype=np.int16)

count = 0
- for brk, brv in pg.nodes_iter(data=True):
+ for brk, brv in pg.nodes(data=True):
count = count + 1
iflogger.info(brv)
iflogger.info(brk)
@@ -429,7 +429,7 @@ def create_wm_mask(subject_id, subjects_dir, fs_dir, parcellation_name):
roid = roi.get_data()
assert roid.shape[0] == wmmask.shape[0]
pg = nx.read_graphml(pgpath)
- for brk, brv in pg.nodes_iter(data=True):
+ for brk, brv in pg.nodes(data=True):
if brv['dn_region'] == 'cortical':
iflogger.info("Subtracting region %s with intensity value %s" %
(brv['dn_region'], brv['dn_correspondence_id']))
4 changes: 2 additions & 2 deletions nipype/pipeline/engine/tests/test_engine.py
@@ -316,7 +316,7 @@ def test_disconnect():
flow1 = pe.Workflow(name='test')
flow1.connect(a, 'a', b, 'a')
flow1.disconnect(a, 'a', b, 'a')
- assert flow1._graph.edges() == []
+ assert list(flow1._graph.edges()) == []


def test_doubleconnect():
@@ -637,7 +637,7 @@ def func1(in1):
n1.inputs.in1 = [1]
eg = w1.run()

- node = eg.nodes()[0]
+ node = list(eg.nodes())[0]
outjson = glob(os.path.join(node.output_dir(), '_0x*.json'))
assert len(outjson) == 1

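Both test fixes stem from the same 2.x behavior: nodes() and edges() now return lazy views, which neither compare equal to a list nor support positional indexing. A toy sketch (not from the PR):

# Views vs. lists in networkx 2.x: why both assertions above changed.
import networkx as nx

G = nx.Graph()
assert G.edges() != []            # an empty EdgeView is not the empty list
assert list(G.edges()) == []      # materialize, then compare

G.add_node('a')
# G.nodes()[0] fails in 2.x (views are not sequences); index a list instead:
assert list(G.nodes())[0] == 'a'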