Skip to content

Commit

Permalink
Transformed into valid Python 3 code
Browse files Browse the repository at this point in the history
  • Loading branch information
Fynardo committed Oct 27, 2017
1 parent 56db1b0 commit 851e2f6
Show file tree
Hide file tree
Showing 12 changed files with 37 additions and 37 deletions.
10 changes: 5 additions & 5 deletions pygraph/classes/directed_graph.py
Original file line number Diff line number Diff line change
Expand Up @@ -126,11 +126,11 @@ def get_node(self, node_id):

def get_all_node_ids(self):
    """Return a list of all the node ids in the graph.

    Wrapped in list() because on Python 3 dict.keys() returns a live
    view, not a list; callers expect a materialized list they can
    index and mutate safely.
    """
    # NOTE: the diff artifact left a stale Python-2 `return self.nodes.keys()`
    # ahead of the fixed line, making the fix unreachable; only the
    # Python-3 form is kept. Iterating the dict directly yields its keys.
    return list(self.nodes)

def get_all_node_objects(self):
    """Return a list of all the node objects in the graph.

    list() materializes the Python-3 dict.values() view so callers
    receive a real list, matching the Python-2 behavior.
    """
    # The stale pre-port `return self.nodes.values()` line (which made the
    # fixed return unreachable) has been dropped.
    return list(self.nodes.values())

def get_edge(self, edge_id):
"""Returns the edge object identified by "edge_id"."""
Expand All @@ -142,11 +142,11 @@ def get_edge(self, edge_id):

def get_all_edge_ids(self):
    """Return a list of all the edge ids in the graph.

    Wrapped in list() because on Python 3 dict.keys() returns a live
    view; callers expect a materialized list.
    """
    # The stale pre-port `return self.edges.keys()` line (which shadowed the
    # fix) has been removed. Iterating the dict directly yields its keys.
    return list(self.edges)

def get_all_edge_objects(self):
    """Return a list of all the edge objects in the graph.

    list() materializes the Python-3 dict.values() view so callers
    receive a real list, matching the Python-2 behavior.
    """
    # The stale pre-port `return self.edges.values()` line (which made the
    # fixed return unreachable) has been dropped.
    return list(self.edges.values())

def delete_edge_by_id(self, edge_id):
"""Removes the edge identified by "edge_id" from the graph."""
Expand Down Expand Up @@ -189,7 +189,7 @@ def delete_node(self, node_id):
self.delete_edge_by_id(e)

# Remove all edges to the node
edges = [edge_id for edge_id, edge in self.edges.items() if edge['vertices'][1] == node_id]
edges = [edge_id for edge_id, edge in list(self.edges.items()) if edge['vertices'][1] == node_id]
for e in edges:
self.delete_edge_by_id(e)

Expand Down
2 changes: 1 addition & 1 deletion pygraph/functions/biconnected_components.py
Original file line number Diff line number Diff line change
Expand Up @@ -282,7 +282,7 @@ def _internal_get_cut_vertex_list(graph):

# The root node gets special treatment; it's a cut vertex iff it has multiple children
if len(children[root_dfs_count]) > 1:
for node_id, dfs in depth.items():
for node_id, dfs in list(depth.items()):
if dfs == root_dfs_count:
list_of_cut_vertices.add(node_id)
break
Expand Down
2 changes: 1 addition & 1 deletion pygraph/functions/connected_components.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,7 @@ def get_connected_components_as_subgraphs(graph):

for c in components:
edge_ids = set()
nodes = map(lambda node: graph.get_node(node), c)
nodes = [graph.get_node(node) for node in c]
for n in nodes:
# --Loop through the edges in each node, to determine if it should be included
for e in n['edges']:
Expand Down
12 changes: 6 additions & 6 deletions pygraph/functions/planarity/kocay_algorithm.py
Original file line number Diff line number Diff line change
Expand Up @@ -85,7 +85,7 @@ def __sort_adjacency_lists(dfs_data):
edge_weights = dfs_data['edge_weights']
edge_lookup = dfs_data['edge_lookup']

for node_id, adj_list in adjacency_lists.items():
for node_id, adj_list in list(adjacency_lists.items()):
node_weight_lookup = {}
frond_lookup = {}
for node_b in adj_list:
Expand Down Expand Up @@ -751,8 +751,8 @@ def __get_dfs_data(graph, adj=None):
* 'children_lookup': A lookup dict mapping nodes to their children
"""
ordering, parent_lookup, children_lookup = depth_first_search_with_parent_data(graph, adjacency_lists=adj)
ordering_lookup = dict(zip(ordering, range(1, len(ordering) + 1)))
node_lookup = dict(zip(range(1, len(ordering) + 1), ordering))
ordering_lookup = dict(list(zip(ordering, list(range(1, len(ordering) + 1)))))
node_lookup = dict(list(zip(list(range(1, len(ordering) + 1)), ordering)))
edge_lookup = {}

for edge_id in graph.get_all_edge_ids():
Expand Down Expand Up @@ -1007,7 +1007,7 @@ def _L(dfs_data):
"""L(T) contains leaves and branch points for the DFS-tree T."""
"""L(T) = {v | the first w in Adj[v] corresponds to a frond vw}."""
node_set = set()
for v, adj in dfs_data['adj'].items():
for v, adj in list(dfs_data['adj'].items()):
w = adj[0]
if is_frond(v, w, dfs_data):
node_set.add(v)
Expand Down Expand Up @@ -1058,7 +1058,7 @@ def fn_x(i, dfs_data):
"""The minimum vertex (DFS-number) in a frond contained in Ri."""
try:
return R(i, dfs_data)['x']
except Exception, e:
except Exception as e:
# Page 17 states that if Ri is empty, then we take xi to be n
return dfs_data['graph'].num_nodes()

Expand All @@ -1067,7 +1067,7 @@ def y(i, dfs_data):
"""The maximum vertex (DFS-number) in a frond contained in Ri."""
try:
return R(i, dfs_data)['y']
except Exception, e:
except Exception as e:
# Page 17 states that if Ri is empty, then we take yi to be 0
return 0

Expand Down
2 changes: 1 addition & 1 deletion pygraph/functions/spanning_tree.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,7 @@ def find_minimum_spanning_forest(graph):
def find_minimum_spanning_forest_as_subgraphs(graph):
    """Calculate the minimum spanning forest of *graph* and return it as a
    list of subgraph objects, one per tree in the forest.

    Each tree's edge list is expanded into a subgraph via
    get_subgraph_from_edge_list.
    """
    forest = find_minimum_spanning_forest(graph)
    # The diff artifact duplicated this assignment (a stale Python-2
    # map/lambda line immediately overwritten by the comprehension);
    # only the list-comprehension form is kept, returned directly.
    return [get_subgraph_from_edge_list(graph, edge_list) for edge_list in forest]

Expand Down
14 changes: 7 additions & 7 deletions pygraph/helpers/functions.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,12 +14,12 @@ def make_subgraph(graph, vertices, edges):
local_graph = copy.deepcopy(graph)

# Remove all the edges that aren't in the list
edges_to_delete = filter(lambda x: x not in edges, local_graph.get_all_edge_ids())
edges_to_delete = [x for x in local_graph.get_all_edge_ids() if x not in edges]
for e in edges_to_delete:
local_graph.delete_edge_by_id(e)

# Remove all the vertices that aren't in the list
nodes_to_delete = filter(lambda x: x not in vertices, local_graph.get_all_node_ids())
nodes_to_delete = [x for x in local_graph.get_all_node_ids() if x not in vertices]
for n in nodes_to_delete:
local_graph.delete_node(n)

Expand Down Expand Up @@ -144,12 +144,12 @@ def create_graph_from_adjacency_matrix(adjacency_matrix):
node_column_mapping = []

num_columns = len(adjacency_matrix)
for _ in xrange(num_columns):
for _ in range(num_columns):
node_id = graph.new_node()
node_column_mapping.append(node_id)

for j in xrange(num_columns):
for i in xrange(num_columns):
for j in range(num_columns):
for i in range(num_columns):
if adjacency_matrix[j][i]:
jnode_id = node_column_mapping[j]
inode_id = node_column_mapping[i]
Expand All @@ -173,8 +173,8 @@ def is_adjacency_matrix_symmetric(adjacency_matrix):
# Loop through the bottom half of the matrix and compare it to the top half
# --We do the bottom half because of how we construct adjacency matrices
max_i = 0
for j in xrange(num_columns):
for i in xrange(max_i):
for j in range(num_columns):
for i in range(max_i):
# If i == j, we can skip ahead so we don't compare with ourself
if i == j:
continue
Expand Down
8 changes: 4 additions & 4 deletions pygraph/predefined_graphs.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ def build_cycle_graph(num_nodes):
first_node = graph.new_node()
if num_nodes > 1:
previous_node = first_node
for _ in xrange(num_nodes - 1):
for _ in range(num_nodes - 1):
new_node = graph.new_node()
graph.new_edge(previous_node, new_node)
previous_node = new_node
Expand Down Expand Up @@ -95,7 +95,7 @@ def build_k5_graph():
graph = UndirectedGraph()

# K5 has 5 nodes
for _ in xrange(5):
for _ in range(5):
graph.new_node()

# K5 has 10 edges
Expand Down Expand Up @@ -129,7 +129,7 @@ def build_k33_graph():
graph = UndirectedGraph()

# K3,3 has 6 nodes
for _ in xrange(1, 7):
for _ in range(1, 7):
graph.new_node()

# K3,3 has 9 edges
Expand Down Expand Up @@ -161,7 +161,7 @@ def build_groetzch_graph():
# build it via adjacency matrix specification

# -- Initialize the matrix to all zeros
adj = [[0 for _ in xrange(11)] for _ in xrange(11)]
adj = [[0 for _ in range(11)] for _ in range(11)]

# -- Add individual edge connections
row_connections = []
Expand Down
6 changes: 3 additions & 3 deletions pygraph/render.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,14 +3,14 @@

def graph_to_dot(graph, node_renderer=None, edge_renderer=None):
"""Produces a DOT specification string from the provided graph."""
node_pairs = graph.nodes.items()
edge_pairs = graph.edges.items()
node_pairs = list(graph.nodes.items())
edge_pairs = list(graph.edges.items())

if node_renderer is None:
node_renderer_wrapper = lambda nid: ''
else:
node_renderer_wrapper = lambda nid: ' [%s]' % ','.join(
map(lambda tpl: '%s=%s' % tpl, node_renderer(graph, nid).items()))
['%s=%s' % tpl for tpl in list(node_renderer(graph, nid).items())])

# Start the graph
graph_string = 'digraph G {\n'
Expand Down
2 changes: 1 addition & 1 deletion tests/test_biconnected_components.py
Original file line number Diff line number Diff line change
Expand Up @@ -102,7 +102,7 @@ def test_fully_biconnected_graph(self):
the entire graph for a fully biconnected graph?"""
graph = utility_functions.build_fully_biconnected_test_graph()

expected_edges = range(1, 20) # There are 19 edges in the test graph, so their IDs go from 1-19
expected_edges = list(range(1, 20)) # There are 19 edges in the test graph, so their IDs go from 1-19
calculated_edges = find_biconnected_components(graph)

# Verify that there is only a single component in the calculated edge list
Expand Down
6 changes: 3 additions & 3 deletions tests/test_dfs.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,8 +41,8 @@ def test_dfs_depth_ordering(self):
graph = utility_functions.build_biconnected_test_graph()

ordering = depth_first_search(graph, 1)
node_lookup_by_index = dict(zip(range(1, len(ordering) + 1), ordering))
items_sorted_by_dfs_index = zip(ordering, range(1, len(ordering) + 1))
node_lookup_by_index = dict(list(zip(list(range(1, len(ordering) + 1)), ordering)))
items_sorted_by_dfs_index = list(zip(ordering, list(range(1, len(ordering) + 1))))
# index_lookup_by_node = dict(items_sorted_by_dfs_index)

visited_by_node = defaultdict(lambda: False)
Expand All @@ -63,7 +63,7 @@ def in_same_component(node_a, node_b):
successor_node_id = node_lookup_by_index[dfs_index+1]
if in_same_component(node_id, successor_node_id):
neighbor_nodes = graph.neighbors(node_id)
has_unvisited_neighbors = any(map(lambda n: not visited_by_node[n], neighbor_nodes))
has_unvisited_neighbors = any([not visited_by_node[n] for n in neighbor_nodes])
if has_unvisited_neighbors:
self.assertIn(successor_node_id, neighbor_nodes)

Expand Down
2 changes: 1 addition & 1 deletion tests/test_planarity.py
Original file line number Diff line number Diff line change
Expand Up @@ -99,7 +99,7 @@ def test_really_large_cycle_graph_is_planar(self):
planarity = is_planar(graph)

self.assertEqual(expected, planarity)
except RuntimeError, e:
except RuntimeError as e:
if e.args[0] == 'maximum recursion depth exceeded':
#Large graphs cause recursion errors. Exception caught, as expected.
pass
Expand Down
8 changes: 4 additions & 4 deletions tests/utility_functions.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ def build_simple_test_graph(directed=False):
graph = UndirectedGraph()

# There are 7 vertices in the test graph
for _ in xrange(7):
for _ in range(7):
graph.new_node()

# There are 4 edges in the test graph
Expand Down Expand Up @@ -96,7 +96,7 @@ def build_biconnected_test_graph(directed=False):
graph = UndirectedGraph()

# There are 12 vertices in the test graph
for _ in xrange(12):
for _ in range(12):
graph.new_node()

# Nodes 1,2,3 form the first component
Expand Down Expand Up @@ -196,7 +196,7 @@ def build_complicated_test_graph_with_one_mst(directed=False):
else:
graph = UndirectedGraph()

for _ in xrange(7):
for _ in range(7):
graph.new_node()

graph.new_edge(1, 2, 2) # 1
Expand Down Expand Up @@ -253,7 +253,7 @@ def build_petersons_graph():
graph = build_5_cycle_graph()

# --Build a 5-pointed star
for _ in xrange(5):
for _ in range(5):
graph.new_node()
graph.new_edge(6, 8)
graph.new_edge(6, 9)
Expand Down

0 comments on commit 851e2f6

Please sign in to comment.