diff --git a/doc/users/install.rst b/doc/users/install.rst
index 2e38122c68..9f500e7ec4 100644
--- a/doc/users/install.rst
+++ b/doc/users/install.rst
@@ -47,7 +47,7 @@ use the following command::
 While `all` installs everything, one can also install select components as
 listed below::
 
-    'doc': ['Sphinx>=1.4', 'matplotlib', 'pydotplus'],
+    'doc': ['Sphinx>=1.4', 'matplotlib', 'pydotplus', 'pydot>=1.2.3'],
     'tests': ['pytest-cov', 'codecov'],
     'nipy': ['nitime', 'nilearn', 'dipy', 'nipy', 'matplotlib'],
     'profiler': ['psutil'],
diff --git a/examples/dmri_dtk_dti.py b/examples/dmri_dtk_dti.py
index e71d519912..4a5e2676cf 100755
--- a/examples/dmri_dtk_dti.py
+++ b/examples/dmri_dtk_dti.py
@@ -37,7 +37,6 @@
 
 package_check('numpy', '1.3', 'tutorial1')
 package_check('scipy', '0.7', 'tutorial1')
-package_check('networkx', '1.0', 'tutorial1')
 package_check('IPython', '0.10', 'tutorial1')
 
 
diff --git a/examples/dmri_dtk_odf.py b/examples/dmri_dtk_odf.py
index ff295b1d9f..b4fb978dd0 100755
--- a/examples/dmri_dtk_odf.py
+++ b/examples/dmri_dtk_odf.py
@@ -37,7 +37,6 @@
 
 package_check('numpy', '1.3', 'tutorial1')
 package_check('scipy', '0.7', 'tutorial1')
-package_check('networkx', '1.0', 'tutorial1')
 package_check('IPython', '0.10', 'tutorial1')
 
 
diff --git a/examples/dmri_fsl_dti.py b/examples/dmri_fsl_dti.py
index 1eb3c99bdd..05891a8727 100755
--- a/examples/dmri_fsl_dti.py
+++ b/examples/dmri_fsl_dti.py
@@ -37,7 +37,6 @@
 
 package_check('numpy', '1.3', 'tutorial1')
 package_check('scipy', '0.7', 'tutorial1')
-package_check('networkx', '1.0', 'tutorial1')
 package_check('IPython', '0.10', 'tutorial1')
 
 
diff --git a/examples/fmri_slicer_coregistration.py b/examples/fmri_slicer_coregistration.py
index daf5bbb9e7..e0129651dd 100755
--- a/examples/fmri_slicer_coregistration.py
+++ b/examples/fmri_slicer_coregistration.py
@@ -37,7 +37,6 @@
 
 package_check('numpy', '1.3', 'tutorial1')
 package_check('scipy', '0.7', 'tutorial1')
-package_check('networkx', '1.0', 'tutorial1')
 package_check('IPython', '0.10', 'tutorial1')
 
 """The nipype tutorial contains data for two subjects. Subject data
diff --git a/nipype/info.py b/nipype/info.py
index 4b416c6db3..e813a271fe 100644
--- a/nipype/info.py
+++ b/nipype/info.py
@@ -107,6 +107,7 @@ def get_nipype_gitversion():
 SIMPLEJSON_MIN_VERSION = '3.8.0'
 PROV_VERSION = '1.5.0'
 CLICK_MIN_VERSION = '6.6.0'
+PYDOT_MIN_VERSION = '1.2.3'
 
 NAME = 'nipype'
 MAINTAINER = 'nipype developers'
@@ -142,6 +143,7 @@ def get_nipype_gitversion():
     'pytest>=%s' % PYTEST_MIN_VERSION,
     'mock',
     'pydotplus',
+    'pydot>=%s' % PYDOT_MIN_VERSION,
     'packaging',
 ]
 
@@ -154,7 +156,7 @@ def get_nipype_gitversion():
 ]
 
 EXTRA_REQUIRES = {
-    'doc': ['Sphinx>=1.4', 'matplotlib', 'pydotplus'],
+    'doc': ['Sphinx>=1.4', 'matplotlib', 'pydotplus', 'pydot>=1.2.3'],
     'tests': TESTS_REQUIRES,
     'nipy': ['nitime', 'nilearn', 'dipy', 'nipy', 'matplotlib'],
     'profiler': ['psutil'],
diff --git a/nipype/interfaces/cmtk/cmtk.py b/nipype/interfaces/cmtk/cmtk.py
index 7d65af99a7..4eeec3e370 100644
--- a/nipype/interfaces/cmtk/cmtk.py
+++ b/nipype/interfaces/cmtk/cmtk.py
@@ -214,16 +214,16 @@ def cmat(track_file, roi_file, resolution_network_file, matrix_name, matrix_mat_
     nROIs = len(gp.nodes())
 
     # add node information from parcellation
-    if 'dn_position' in gp.node[gp.nodes()[0]]:
+    if 'dn_position' in gp.nodes[list(gp.nodes())[0]]:
         G = gp.copy()
     else:
         G = nx.Graph()
-        for u, d in gp.nodes_iter(data=True):
-            G.add_node(int(u), d)
+        for u, d in gp.nodes(data=True):
+            G.add_node(int(u), **d)
             # compute a position for the node based on the mean position of the
             # ROI in voxel coordinates (segmentation volume )
             xyz = tuple(np.mean(np.where(np.flipud(roiData) == int(d["dn_correspondence_id"])), axis=1))
-            G.node[int(u)]['dn_position'] = tuple([xyz[0], xyz[2], -xyz[1]])
+            G.nodes[int(u)]['dn_position'] = tuple([xyz[0], xyz[2], -xyz[1]])
 
     if intersections:
         iflogger.info("Filtering tractography from intersections")
@@ -304,7 +304,7 @@ def cmat(track_file, roi_file, resolution_network_file, matrix_name, matrix_mat_
     fibmean = numfib.copy()
     fibmedian = numfib.copy()
     fibdev = numfib.copy()
-    for u, v, d in G.edges_iter(data=True):
+    for u, v, d in list(G.edges(data=True)):
         G.remove_edge(u, v)
         di = {}
         if 'fiblist' in d:
@@ -319,7 +319,7 @@ def cmat(track_file, roi_file, resolution_network_file, matrix_name, matrix_mat_
             di['fiber_length_median'] = 0
             di['fiber_length_std'] = 0
         if not u == v:  # Fix for self loop problem
-            G.add_edge(u, v, di)
+            G.add_edge(u, v, **di)
         if 'fiblist' in d:
             numfib.add_edge(u, v, weight=di['number_of_fibers'])
             fibmean.add_edge(u, v, weight=di['fiber_length_mean'])
@@ -747,10 +747,10 @@ def create_nodes(roi_file, resolution_network_file, out_filename):
     roi_image = nb.load(roi_file, mmap=NUMPY_MMAP)
     roiData = roi_image.get_data()
     nROIs = len(gp.nodes())
-    for u, d in gp.nodes_iter(data=True):
-        G.add_node(int(u), d)
+    for u, d in gp.nodes(data=True):
+        G.add_node(int(u), **d)
         xyz = tuple(np.mean(np.where(np.flipud(roiData) == int(d["dn_correspondence_id"])), axis=1))
-        G.node[int(u)]['dn_position'] = tuple([xyz[0], xyz[2], -xyz[1]])
+        G.nodes[int(u)]['dn_position'] = tuple([xyz[0], xyz[2], -xyz[1]])
     nx.write_gpickle(G, out_filename)
     return out_filename
diff --git a/nipype/interfaces/cmtk/nbs.py b/nipype/interfaces/cmtk/nbs.py
index fd4ff0e050..3754484677 100644
--- a/nipype/interfaces/cmtk/nbs.py
+++ b/nipype/interfaces/cmtk/nbs.py
@@ -113,9 +113,9 @@ def _run_interface(self, runtime):
             node_network = nx.read_gpickle(node_ntwk_name)
             iflogger.info('Populating node dictionaries with attributes from {node}'.format(node=node_ntwk_name))
-            for nid, ndata in node_network.nodes_iter(data=True):
-                nbsgraph.node[nid] = ndata
-                nbs_pval_graph.node[nid] = ndata
+            for nid, ndata in node_network.nodes(data=True):
+                nbsgraph.nodes[nid].update(ndata)
+                nbs_pval_graph.nodes[nid].update(ndata)
 
         path = op.abspath('NBS_Result_' + details)
         iflogger.info(path)
diff --git a/nipype/interfaces/cmtk/nx.py b/nipype/interfaces/cmtk/nx.py
index 48763256f7..c2f6d7c361 100644
--- a/nipype/interfaces/cmtk/nx.py
+++ b/nipype/interfaces/cmtk/nx.py
@@ -48,7 +48,7 @@ def read_unknown_ntwk(ntwk):
 
 def remove_all_edges(ntwk):
     ntwktmp = ntwk.copy()
-    edges = ntwktmp.edges_iter()
+    edges = list(ntwktmp.edges())
    for edge in edges:
         ntwk.remove_edge(edge[0], edge[1])
     return ntwk
@@ -60,20 +60,20 @@ def fix_keys_for_gexf(orig):
     """
     import networkx as nx
     ntwk = nx.Graph()
-    nodes = orig.nodes_iter()
-    edges = orig.edges_iter()
+    nodes = list(orig.nodes())
+    edges = list(orig.edges())
     for node in nodes:
         newnodedata = {}
-        newnodedata.update(orig.node[node])
-        if 'dn_fsname' in orig.node[node]:
-            newnodedata['label'] = orig.node[node]['dn_fsname']
-        ntwk.add_node(str(node), newnodedata)
-        if 'dn_position' in ntwk.node[str(node)] and 'dn_position' in newnodedata:
-            ntwk.node[str(node)]['dn_position'] = str(newnodedata['dn_position'])
+        newnodedata.update(orig.nodes[node])
+        if 'dn_fsname' in orig.nodes[node]:
+            newnodedata['label'] = orig.nodes[node]['dn_fsname']
+        ntwk.add_node(str(node), **newnodedata)
+        if 'dn_position' in ntwk.nodes[str(node)] and 'dn_position' in newnodedata:
+            ntwk.nodes[str(node)]['dn_position'] = str(newnodedata['dn_position'])
     for edge in edges:
         data = {}
         data = orig.edge[edge[0]][edge[1]]
-        ntwk.add_edge(str(edge[0]), str(edge[1]), data)
+        ntwk.add_edge(str(edge[0]), str(edge[1]), **data)
         if 'fiber_length_mean' in ntwk.edge[str(edge[0])][str(edge[1])]:
             ntwk.edge[str(edge[0])][str(edge[1])]['fiber_length_mean'] = str(data['fiber_length_mean'])
         if 'fiber_length_std' in ntwk.edge[str(edge[0])][str(edge[1])]:
@@ -125,7 +125,7 @@ def average_networks(in_files, ntwk_res_file, group_id):
             tmp = nx.read_gpickle(subject)
             iflogger.info(('File {s} has {n} '
                            'edges').format(s=subject, n=tmp.number_of_edges()))
-            edges = tmp.edges_iter()
+            edges = list(tmp.edges())
             for edge in edges:
                 data = {}
                 data = tmp.edge[edge[0]][edge[1]]
@@ -134,29 +134,29 @@ def average_networks(in_files, ntwk_res_file, group_id):
                     current = {}
                     current = ntwk.edge[edge[0]][edge[1]]
                     data = add_dicts_by_key(current, data)
-                ntwk.add_edge(edge[0], edge[1], data)
-            nodes = tmp.nodes_iter()
+                ntwk.add_edge(edge[0], edge[1], **data)
+            nodes = list(tmp.nodes())
             for node in nodes:
                 data = {}
-                data = ntwk.node[node]
-                if 'value' in tmp.node[node]:
-                    data['value'] = data['value'] + tmp.node[node]['value']
-                ntwk.add_node(node, data)
+                data = ntwk.nodes[node]
+                if 'value' in tmp.nodes[node]:
+                    data['value'] = data['value'] + tmp.nodes[node]['value']
+                ntwk.add_node(node, **data)
 
         # Divides each value by the number of files
-        nodes = ntwk.nodes_iter()
-        edges = ntwk.edges_iter()
+        nodes = list(ntwk.nodes())
+        edges = list(ntwk.edges())
         iflogger.info(('Total network has {n} '
                        'edges').format(n=ntwk.number_of_edges()))
         avg_ntwk = nx.Graph()
         newdata = {}
         for node in nodes:
-            data = ntwk.node[node]
+            data = ntwk.nodes[node]
             newdata = data
             if 'value' in data:
                 newdata['value'] = data['value'] / len(in_files)
-            ntwk.node[node]['value'] = newdata
-            avg_ntwk.add_node(node, newdata)
+            ntwk.nodes[node]['value'] = newdata
+            avg_ntwk.add_node(node, **newdata)
 
         edge_dict = {}
         edge_dict['count'] = np.zeros((avg_ntwk.number_of_nodes(),
@@ -168,12 +168,12 @@ def average_networks(in_files, ntwk_res_file, group_id):
                     if not key == 'count':
                         data[key] = data[key] / len(in_files)
                 ntwk.edge[edge[0]][edge[1]] = data
-                avg_ntwk.add_edge(edge[0], edge[1], data)
+                avg_ntwk.add_edge(edge[0], edge[1], **data)
             edge_dict['count'][edge[0] - 1][edge[1] - 1] = ntwk.edge[edge[0]][edge[1]]['count']
 
     iflogger.info('After thresholding, the average network has has {n} edges'.format(n=avg_ntwk.number_of_edges()))
 
-    avg_edges = avg_ntwk.edges_iter()
+    avg_edges = avg_ntwk.edges()
     for edge in avg_edges:
         data = avg_ntwk.edge[edge[0]][edge[1]]
         for key in list(data.keys()):
@@ -319,11 +319,11 @@ def compute_network_measures(ntwk):
 
 def add_node_data(node_array, ntwk):
     node_ntwk = nx.Graph()
     newdata = {}
-    for idx, data in ntwk.nodes_iter(data=True):
+    for idx, data in ntwk.nodes(data=True):
         if not int(idx) == 0:
             newdata['value'] = node_array[int(idx) - 1]
             data.update(newdata)
-            node_ntwk.add_node(int(idx), data)
+            node_ntwk.add_node(int(idx), **data)
     return node_ntwk
 
@@ -339,7 +339,7 @@ def add_edge_data(edge_array, ntwk, above=0, below=0):
                 old_edge_dict = edge_ntwk.edge[x + 1][y + 1]
                 edge_ntwk.remove_edge(x + 1, y + 1)
                 data.update(old_edge_dict)
-                edge_ntwk.add_edge(x + 1, y + 1, data)
+                edge_ntwk.add_edge(x + 1, y + 1, **data)
     return edge_ntwk
 
diff --git a/nipype/interfaces/cmtk/parcellation.py b/nipype/interfaces/cmtk/parcellation.py
index 5a510bcdf7..7a2340cb4d 100644
--- a/nipype/interfaces/cmtk/parcellation.py
+++ b/nipype/interfaces/cmtk/parcellation.py
@@ -213,7 +213,7 @@ def create_roi(subject_id, subjects_dir, fs_dir, parcellation_name, dilation):
     rois = np.zeros((256, 256, 256), dtype=np.int16)
 
     count = 0
-    for brk, brv in pg.nodes_iter(data=True):
+    for brk, brv in pg.nodes(data=True):
         count = count + 1
         iflogger.info(brv)
         iflogger.info(brk)
@@ -429,7 +429,7 @@ def create_wm_mask(subject_id, subjects_dir, fs_dir, parcellation_name):
     roid = roi.get_data()
     assert roid.shape[0] == wmmask.shape[0]
     pg = nx.read_graphml(pgpath)
-    for brk, brv in pg.nodes_iter(data=True):
+    for brk, brv in pg.nodes(data=True):
         if brv['dn_region'] == 'cortical':
             iflogger.info("Subtracting region %s with intensity value %s" %
                           (brv['dn_region'], brv['dn_correspondence_id']))
diff --git a/nipype/pipeline/engine/tests/test_engine.py b/nipype/pipeline/engine/tests/test_engine.py
index cece44444b..90d566ddf9 100644
--- a/nipype/pipeline/engine/tests/test_engine.py
+++ b/nipype/pipeline/engine/tests/test_engine.py
@@ -316,7 +316,7 @@ def test_disconnect():
     flow1 = pe.Workflow(name='test')
     flow1.connect(a, 'a', b, 'a')
     flow1.disconnect(a, 'a', b, 'a')
-    assert flow1._graph.edges() == []
+    assert list(flow1._graph.edges()) == []
 
 
 def test_doubleconnect():
@@ -637,7 +637,7 @@ def func1(in1):
     n1.inputs.in1 = [1]
     eg = w1.run()
 
-    node = eg.nodes()[0]
+    node = list(eg.nodes())[0]
     outjson = glob(os.path.join(node.output_dir(), '_0x*.json'))
     assert len(outjson) == 1
diff --git a/nipype/pipeline/engine/utils.py b/nipype/pipeline/engine/utils.py
index 25b12ab607..f677d6c253 100644
--- a/nipype/pipeline/engine/utils.py
+++ b/nipype/pipeline/engine/utils.py
@@ -23,9 +23,7 @@
 import pickle
 from functools import reduce
 import numpy as np
-from ...utils.misc import package_check
-
-package_check('networkx', '1.3')
+from distutils.version import LooseVersion
 
 import networkx as nx
 
@@ -267,7 +265,7 @@ def _write_detailed_dot(graph, dotfilename):
     for n in nx.topological_sort(graph):
         nodename = str(n)
         inports = []
-        for u, v, d in graph.in_edges_iter(nbunch=n, data=True):
+        for u, v, d in graph.in_edges(nbunch=n, data=True):
             for cd in d['connect']:
                 if isinstance(cd[0], (str, bytes)):
                     outport = cd[0]
@@ -287,7 +285,7 @@ def _write_detailed_dot(graph, dotfilename):
             inputstr += '|<in%s> %s' % (replacefunk(ip), ip)
         inputstr += '}'
         outports = []
-        for u, v, d in graph.out_edges_iter(nbunch=n, data=True):
+        for u, v, d in graph.out_edges(nbunch=n, data=True):
             for cd in d['connect']:
                 if isinstance(cd[0], (str, bytes)):
                     outport = cd[0]
@@ -446,7 +444,7 @@ def get_levels(G):
     levels = {}
     for n in nx.topological_sort(G):
         levels[n] = 0
-        for pred in G.predecessors_iter(n):
+        for pred in G.predecessors(n):
             levels[n] = max(levels[n], levels[pred] + 1)
     return levels
 
@@ -491,9 +489,9 @@ def _merge_graphs(supergraph, nodes, subgraph, nodeid, iterables,
         raise Exception(("Execution graph does not have a unique set of node "
                          "names. Please rerun the workflow"))
     edgeinfo = {}
-    for n in subgraph.nodes():
+    for n in list(subgraph.nodes()):
         nidx = ids.index(n._hierarchy + n._id)
-        for edge in supergraph.in_edges_iter(supernodes[nidx]):
+        for edge in supergraph.in_edges(list(supernodes)[nidx]):
             # make sure edge is not part of subgraph
             if edge[0] not in subgraph.nodes():
                 if n._hierarchy + n._id not in list(edgeinfo.keys()):
@@ -514,7 +512,7 @@ def _merge_graphs(supergraph, nodes, subgraph, nodeid, iterables,
     Gc = deepcopy(subgraph)
     ids = [n._hierarchy + n._id for n in Gc.nodes()]
     nodeidx = ids.index(nodeid)
-    rootnode = Gc.nodes()[nodeidx]
+    rootnode = list(Gc.nodes())[nodeidx]
     paramstr = ''
     for key, val in sorted(params.items()):
         paramstr = '{}_{}_{}'.format(
@@ -613,10 +611,10 @@ def _node_ports(graph, node):
     """
     portinputs = {}
     portoutputs = {}
-    for u, _, d in graph.in_edges_iter(node, data=True):
+    for u, _, d in graph.in_edges(node, data=True):
         for src, dest in d['connect']:
             portinputs[dest] = (u, src)
-    for _, v, d in graph.out_edges_iter(node, data=True):
+    for _, v, d in graph.out_edges(node, data=True):
         for src, dest in d['connect']:
             if isinstance(src, tuple):
                 srcport = src[0]
@@ -682,7 +680,7 @@ def generate_expanded_graph(graph_in):
     logger.debug("PE: expanding iterables")
     graph_in = _remove_nonjoin_identity_nodes(graph_in, keep_iterables=True)
     # standardize the iterables as {(field, function)} dictionaries
-    for node in graph_in.nodes_iter():
+    for node in graph_in.nodes():
         if node.iterables:
             _standardize_iterables(node)
     allprefixes = list('abcdefghijklmnopqrstuvwxyz')
@@ -697,7 +695,7 @@ def generate_expanded_graph(graph_in):
         logger.debug("Expanding the iterable node %s..." % inode)
 
         # the join successor nodes of the current iterable node
-        jnodes = [node for node in graph_in.nodes_iter()
+        jnodes = [node for node in graph_in.nodes()
                   if hasattr(node, 'joinsource') and
                   inode.name == node.joinsource and
                   nx.has_path(graph_in, inode, node)]
@@ -709,7 +707,7 @@ def generate_expanded_graph(graph_in):
         for jnode in jnodes:
             in_edges = jedge_dict[jnode] = {}
             edges2remove = []
-            for src, dest, data in graph_in.in_edges_iter(jnode, True):
+            for src, dest, data in graph_in.in_edges(jnode, True):
                 in_edges[src.itername] = data
                 edges2remove.append((src, dest))
 
@@ -726,7 +724,7 @@ def generate_expanded_graph(graph_in):
                 src_fields = [src_fields]
             # find the unique iterable source node in the graph
             try:
-                iter_src = next((node for node in graph_in.nodes_iter()
+                iter_src = next((node for node in graph_in.nodes()
                                  if node.name == src_name and
                                  nx.has_path(graph_in, node, inode)))
             except StopIteration:
@@ -781,7 +779,11 @@ def make_field_func(*pair):
             inode._id += ('.' + iterable_prefix + 'I')
 
             # merge the iterated subgraphs
-            subgraph = graph_in.subgraph(subnodes)
+            # dj: the behaviour of .copy changes in version 2
+            if LooseVersion(nx.__version__) < LooseVersion('2'):
+                subgraph = graph_in.subgraph(subnodes)
+            else:
+                subgraph = graph_in.subgraph(subnodes).copy()
             graph_in = _merge_graphs(graph_in, subnodes,
                                      subgraph, inode._hierarchy + inode._id,
                                      iterables, iterable_prefix, inode.synchronize)
@@ -793,7 +795,7 @@ def make_field_func(*pair):
             old_edge_dict = jedge_dict[jnode]
             # the edge source node replicates
             expansions = defaultdict(list)
-            for node in graph_in.nodes_iter():
+            for node in graph_in.nodes():
                 for src_id, edge_data in list(old_edge_dict.items()):
                     if node.itername.startswith(src_id):
                         expansions[src_id].append(node)
@@ -842,7 +844,7 @@ def make_field_func(*pair):
                     logger.debug("Qualified the %s -> %s join field"
                                  " %s as %s." %
                                  (in_node, jnode, dest_field, slot_field))
-            graph_in.add_edge(in_node, jnode, newdata)
+            graph_in.add_edge(in_node, jnode, **newdata)
             logger.debug("Connected the join node %s subgraph to the"
                          " expanded join point %s" % (jnode, in_node))
 
@@ -1283,9 +1285,9 @@ def write_workflow_prov(graph, filename=None, format='all'):
 
     # add dependencies (edges)
     # Process->Process
-    for idx, edgeinfo in enumerate(graph.in_edges_iter()):
-        ps.g.wasStartedBy(processes[nodes.index(edgeinfo[1])],
-                          starter=processes[nodes.index(edgeinfo[0])])
+    for idx, edgeinfo in enumerate(graph.in_edges()):
+        ps.g.wasStartedBy(processes[list(nodes).index(edgeinfo[1])],
+                          starter=processes[list(nodes).index(edgeinfo[0])])
 
     # write provenance
     ps.write_provenance(filename, format=format)
@@ -1295,7 +1297,7 @@ def write_workflow_prov(graph, filename=None, format='all'):
 def topological_sort(graph, depth_first=False):
     """Returns a depth first sorted order if depth_first is True
     """
-    nodesort = nx.topological_sort(graph)
+    nodesort = list(nx.topological_sort(graph))
     if not depth_first:
         return nodesort, None
     logger.debug("Performing depth first search")
diff --git a/nipype/pipeline/engine/workflows.py b/nipype/pipeline/engine/workflows.py
index e1535b4cf9..17d49b046a 100644
--- a/nipype/pipeline/engine/workflows.py
+++ b/nipype/pipeline/engine/workflows.py
@@ -36,7 +36,7 @@
 
 from ... import config, logging
 
-from ...utils.misc import (unflatten, package_check, str2bool,
+from ...utils.misc import (unflatten, str2bool,
                            getsource, create_function_from_source)
 from ...interfaces.base import (traits, InputMultiPath, CommandLine,
                                 Undefined, TraitedSpec, DynamicTraitedSpec,
@@ -58,7 +58,6 @@
 from .base import EngineBase
 from .nodes import Node, MapNode
 
-package_check('networkx', '1.3')
 logger = logging.getLogger('workflow')
 
 class Workflow(EngineBase):
@@ -185,7 +184,7 @@ def connect(self, *args, **kwargs):
                 # check to see which ports of destnode are already
                 # connected.
                 if not disconnect and (destnode in self._graph.nodes()):
-                    for edge in self._graph.in_edges_iter(destnode):
+                    for edge in self._graph.in_edges(destnode):
                         data = self._graph.get_edge_data(*edge)
                         for sourceinfo, destname in data['connect']:
                             if destname not in connected_ports[destnode]:
@@ -506,8 +505,8 @@ def export(self, filename=None, prefix="output", format="python",
                 else:
                     lines.append(line)
             # write connections
-            for u, _, d in flatgraph.in_edges_iter(nbunch=node,
-                                                   data=True):
+            for u, _, d in flatgraph.in_edges(nbunch=node,
+                                              data=True):
                 for cd in d['connect']:
                     if isinstance(cd[0], tuple):
                         args = list(cd[0])
@@ -633,7 +632,7 @@ def _write_report_info(self, workingdir, name, graph):
                                               total=N,
                                               name='Group_%05d' % gid))
         json_dict['maxN'] = maxN
-        for u, v in graph.in_edges_iter():
+        for u, v in graph.in_edges():
             json_dict['links'].append(dict(source=nodes.index(u),
                                            target=nodes.index(v),
                                            value=1))
@@ -654,7 +653,7 @@ def getname(u, i):
         json_dict = []
         for i, node in enumerate(nodes):
             imports = []
-            for u, v in graph.in_edges_iter(nbunch=node):
+            for u, v in graph.in_edges(nbunch=node):
                 imports.append(getname(u, nodes.index(u)))
             json_dict.append(dict(name=getname(node, i),
                                   size=1,
@@ -669,7 +668,7 @@ def _set_needed_outputs(self, graph):
             return
         for node in graph.nodes():
             node.needed_outputs = []
-            for edge in graph.out_edges_iter(node):
+            for edge in graph.out_edges(node):
                 data = graph.get_edge_data(*edge)
                 sourceinfo = [v1[0] if isinstance(v1, tuple) else v1
                               for v1, v2 in data['connect']]
@@ -683,7 +682,7 @@ def _configure_exec_nodes(self, graph):
         """
         for node in graph.nodes():
             node.input_source = {}
-            for edge in graph.in_edges_iter(node):
+            for edge in graph.in_edges(node):
                 data = graph.get_edge_data(*edge)
                 for sourceinfo, field in data['connect']:
                     node.input_source[field] = \
@@ -758,8 +757,8 @@ def _get_inputs(self):
                 setattr(inputdict, node.name, node.inputs)
             else:
                 taken_inputs = []
-                for _, _, d in self._graph.in_edges_iter(nbunch=node,
-                                                         data=True):
+                for _, _, d in self._graph.in_edges(nbunch=node,
+                                                    data=True):
                     for cd in d['connect']:
                         taken_inputs.append(cd[1])
                 unconnectedinputs = TraitedSpec()
@@ -864,7 +863,8 @@ def _generate_flatgraph(self):
                 # use in_edges instead of in_edges_iter to allow
                 # disconnections to take place properly. otherwise, the
                 # edge dict is modified.
-                for u, _, d in self._graph.in_edges(nbunch=node, data=True):
+                # dj: added list() for networkx ver.2
+                for u, _, d in list(self._graph.in_edges(nbunch=node, data=True)):
                     logger.debug('in: connections-> %s', to_str(d['connect']))
                     for cd in deepcopy(d['connect']):
                         logger.debug("in: %s", to_str(cd))
@@ -877,7 +877,8 @@ def _generate_flatgraph(self):
                         self.disconnect(u, cd[0], node, cd[1])
                         self.connect(srcnode, srcout, dstnode, dstin)
                 # do not use out_edges_iter for reasons stated in in_edges
-                for _, v, d in self._graph.out_edges(nbunch=node, data=True):
+                # dj: for ver 2 use list(out_edges)
+                for _, v, d in list(self._graph.out_edges(nbunch=node, data=True)):
                     logger.debug('out: connections-> %s', to_str(d['connect']))
                     for cd in deepcopy(d['connect']):
                         logger.debug("out: %s", to_str(cd))
@@ -965,7 +966,7 @@ def _get_dot(self, prefix=None, hierarchy=None, colored=False,
                                                   simple_form=simple_form, level=level + 3))
                 dotlist.append('}')
             else:
-                for subnode in self._graph.successors_iter(node):
+                for subnode in self._graph.successors(node):
                     if node._hierarchy != subnode._hierarchy:
                         continue
                     if not isinstance(subnode, Workflow):
@@ -980,7 +981,7 @@ def _get_dot(self, prefix=None, hierarchy=None, colored=False,
                                             subnodename))
                         logger.debug('connection: %s', dotlist[-1])
         # add between workflow connections
-        for u, v, d in self._graph.edges_iter(data=True):
+        for u, v, d in self._graph.edges(data=True):
             uname = '.'.join(hierarchy + [u.fullname])
             vname = '.'.join(hierarchy + [v.fullname])
             for src, dest in d['connect']:
diff --git a/nipype/pipeline/plugins/tests/test_linear.py b/nipype/pipeline/plugins/tests/test_linear.py
index e4df3f7db3..2e2fead4eb 100644
--- a/nipype/pipeline/plugins/tests/test_linear.py
+++ b/nipype/pipeline/plugins/tests/test_linear.py
@@ -41,6 +41,6 @@ def test_run_in_series(tmpdir):
     mod1.inputs.input1 = 1
     execgraph = pipe.run(plugin="Linear")
     names = ['.'.join((node._hierarchy, node.name)) for node in execgraph.nodes()]
-    node = execgraph.nodes()[names.index('pipe.mod1')]
+    node = list(execgraph.nodes())[names.index('pipe.mod1')]
     result = node.get_output('output1')
     assert result == [1, 1]
diff --git a/nipype/pipeline/plugins/tests/test_multiproc.py b/nipype/pipeline/plugins/tests/test_multiproc.py
index 20ea72a929..6dab555a11 100644
--- a/nipype/pipeline/plugins/tests/test_multiproc.py
+++ b/nipype/pipeline/plugins/tests/test_multiproc.py
@@ -46,7 +46,7 @@ def test_run_multiproc(tmpdir):
     pipe.config['execution']['poll_sleep_duration'] = 2
     execgraph = pipe.run(plugin="MultiProc")
     names = ['.'.join((node._hierarchy, node.name)) for node in execgraph.nodes()]
-    node = execgraph.nodes()[names.index('pipe.mod1')]
+    node = list(execgraph.nodes())[names.index('pipe.mod1')]
     result = node.get_output('output1')
     assert result == [1, 1]
 
diff --git a/nipype/pipeline/plugins/tests/test_multiproc_nondaemon.py b/nipype/pipeline/plugins/tests/test_multiproc_nondaemon.py
index f8dd22ed66..7112aa2448 100644
--- a/nipype/pipeline/plugins/tests/test_multiproc_nondaemon.py
+++ b/nipype/pipeline/plugins/tests/test_multiproc_nondaemon.py
@@ -123,7 +123,7 @@ def run_multiproc_nondaemon_with_flag(nondaemon_flag):
                                  'non_daemon': nondaemon_flag})
 
     names = ['.'.join((node._hierarchy, node.name)) for node in execgraph.nodes()]
-    node = execgraph.nodes()[names.index('pipe.f2')]
+    node = list(execgraph.nodes())[names.index('pipe.f2')]
     result = node.get_output('sum_out')
     os.chdir(cur_dir)
     rmtree(temp_dir)
diff --git a/nipype/pipeline/plugins/tests/test_oar.py b/nipype/pipeline/plugins/tests/test_oar.py
index 68dc98c344..181aff0f6f 100644
--- a/nipype/pipeline/plugins/tests/test_oar.py
+++ b/nipype/pipeline/plugins/tests/test_oar.py
@@ -50,7 +50,7 @@ def test_run_oar():
         '.'.join((node._hierarchy, node.name))
         for node in execgraph.nodes()
     ]
-    node = execgraph.nodes()[names.index('pipe.mod1')]
+    node = list(execgraph.nodes())[names.index('pipe.mod1')]
     result = node.get_output('output1')
     assert result == [1, 1]
     os.chdir(cur_dir)
diff --git a/nipype/pipeline/plugins/tests/test_pbs.py b/nipype/pipeline/plugins/tests/test_pbs.py
index d7b5a83528..719ffbfc72 100644
--- a/nipype/pipeline/plugins/tests/test_pbs.py
+++ b/nipype/pipeline/plugins/tests/test_pbs.py
@@ -48,7 +48,7 @@ def test_run_pbsgraph():
     mod1.inputs.input1 = 1
     execgraph = pipe.run(plugin="PBSGraph")
     names = ['.'.join((node._hierarchy, node.name)) for node in execgraph.nodes()]
-    node = execgraph.nodes()[names.index('pipe.mod1')]
+    node = list(execgraph.nodes())[names.index('pipe.mod1')]
     result = node.get_output('output1')
     assert result == [1, 1]
     os.chdir(cur_dir)
diff --git a/nipype/pipeline/plugins/tests/test_somaflow.py b/nipype/pipeline/plugins/tests/test_somaflow.py
index f8309bf826..f2d5c945fb 100644
--- a/nipype/pipeline/plugins/tests/test_somaflow.py
+++ b/nipype/pipeline/plugins/tests/test_somaflow.py
@@ -46,6 +46,6 @@ def test_run_somaflow(tmpdir):
     mod1.inputs.input1 = 1
     execgraph = pipe.run(plugin="SomaFlow")
     names = ['.'.join((node._hierarchy, node.name)) for node in execgraph.nodes()]
-    node = execgraph.nodes()[names.index('pipe.mod1')]
+    node = list(execgraph.nodes())[names.index('pipe.mod1')]
     result = node.get_output('output1')
     assert result == [1, 1]
diff --git a/nipype/utils/misc.py b/nipype/utils/misc.py
index 552e24c435..c73e643763 100644
--- a/nipype/utils/misc.py
+++ b/nipype/utils/misc.py
@@ -188,7 +188,7 @@ def package_check(pkg_name, version=None, app=None, checker=LooseVersion,
 
     Examples
     --------
    package_check('numpy', '1.3')
-    package_check('networkx', '1.0', 'tutorial1')
+    package_check('scipy', '0.7', 'tutorial1')
 
     """
diff --git a/requirements.txt b/requirements.txt
index a697b62244..a5ac0a5683 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,6 +1,6 @@
 numpy>=1.9.0
 scipy>=0.11
-networkx>=1.7
+networkx>=1.9
 traits>=4.6
 python-dateutil>=1.5
 nibabel>=2.1.0
@@ -13,4 +13,5 @@ configparser
 pytest>=3.0
 mock
 pydotplus
+pydot>=1.2.3
 packaging
diff --git a/rtd_requirements.txt b/rtd_requirements.txt
index a002562f3e..b36047b653 100644
--- a/rtd_requirements.txt
+++ b/rtd_requirements.txt
@@ -1,6 +1,6 @@
 numpy>=1.6.2
 scipy>=0.11
-networkx>=1.7
+networkx>=1.9
 traits>=4.6
 python-dateutil>=1.5
 nibabel>=2.1.0
@@ -12,6 +12,7 @@ configparser
 pytest>=3.0
 mock
 pydotplus
+pydot>=1.2.3
 psutil
 matplotlib
 packaging
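
For reference, the minimal standalone sketch below condenses the networkx 1.x -> 2.x
API changes this patch applies; it is not part of the patch, it assumes networkx >= 2.0,
and the graph contents and attribute names (dn_position, number_of_fibers) are
illustrative only.

    # nx1_to_nx2_sketch.py -- illustrative migration patterns, not nipype code
    import networkx as nx

    G = nx.Graph()

    # Attribute dicts are now keyword-only: add_node(n, d) -> add_node(n, **d)
    d = {'dn_position': (1.0, 2.0, 3.0)}
    G.add_node(1, **d)
    G.add_edge(1, 2, **{'number_of_fibers': 10})

    # G.node[n] -> G.nodes[n] (a mapping view over node attributes)
    G.nodes[1]['dn_position'] = (1.0, 3.0, -2.0)

    # *_iter() methods are gone; nodes()/edges()/in_edges() return views
    for u, data in G.nodes(data=True):
        pass

    # Views are not indexable and cannot be mutated while iterated,
    # hence the list() wrapping throughout the patch
    first = list(G.nodes())[0]                  # nx1: G.nodes()[0]
    for u, v, data in list(G.edges(data=True)):
        G.remove_edge(u, v)                     # safe only because of list()

    # topological_sort() now returns a generator, and subgraph() returns a
    # read-only view, so both are materialized explicitly
    order = list(nx.topological_sort(nx.DiGraph([(1, 2)])))
    sub = G.subgraph([1]).copy()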