Skip to content

Commit

Permalink
Update pattern fusing algo for quantization (openvinotoolkit#733)
Browse files Browse the repository at this point in the history
* Debug

* Test version

* Test version

* Update search_all func

* Update according to rebase changes

* Update reference graphs. Now FQ are put after fused BN + RELU pattern

* Improve code style

* Fix pylint

* Fix pylint x2

* Update according to Andrey's comments

* Remove unnecessary lines
  • Loading branch information
kshpv committed Jun 7, 2021
1 parent 0cbab36 commit 139c390
Show file tree
Hide file tree
Showing 17 changed files with 1,072 additions and 1,031 deletions.
4 changes: 2 additions & 2 deletions nncf/torch/graph/graph.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@
from nncf.torch.graph.graph_matching import Expression
from nncf.torch.graph.graph_matching import NodeExpression
from nncf.torch.graph.graph_matching import get_edge_boundaries
from nncf.torch.graph.graph_matching import search_all
from nncf.torch.graph.graph_matching import find_subgraphs_match_expression


# pylint: disable=too-many-public-methods
Expand Down Expand Up @@ -214,7 +214,7 @@ def get_successor_nncf_nodes(self, node_id: int) -> List[NNCFNode]:
return nncf_nodes

def get_matching_nncf_graph_pattern_io_list(self, expression: Expression) -> List[NNCFGraphPatternIO]:
matched_node_key_sequences = search_all(self._nx_graph, expression)
matched_node_key_sequences = find_subgraphs_match_expression(self._nx_graph, expression)
pattern_ios = [self._get_nncf_graph_pattern_io_list(match) for match in matched_node_key_sequences]
return pattern_ios

Expand Down
110 changes: 85 additions & 25 deletions nncf/torch/graph/graph_matching.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,19 +44,28 @@ def __and__(self, other):
def _iterate_alternatives(self, nodes):
return powerset(nodes, min_r=1)

def match(self, nodes, graph):
def _find_all_matches(self, nodes, graph):
    """Gather every (matched_nodes, following_nodes) pair produced by
    matching each alternative subset of *nodes* against *graph*.

    ``_match`` may return a falsy value (no match), a single pair, or a
    list of pairs; all shapes are normalized to a list of
    ``(list_of_node_keys, list_of_following_keys)`` tuples.
    """
    collected = []
    for alternative in self._iterate_alternatives(nodes):
        outcome = self._match(alternative, graph)
        if not outcome:
            continue
        # Normalize the single-pair shape to a one-element list.
        outcomes = outcome if isinstance(outcome, list) else [outcome]
        for matched, following in outcomes:
            # A single matched node is wrapped in a list for uniformity.
            matched_list = matched if isinstance(matched, list) else [matched]
            collected.append((matched_list, list(following)))
    return collected

n, following = result
following = list(following)
if not isinstance(n, list):
n = [n]
def all_matches(self, nodes, graph):
    """Return every alternative match of this expression starting from *nodes*."""
    matches = self._find_all_matches(nodes, graph)
    return matches

all_matches.append((n, following))
def match(self, nodes, graph):
    """Return the longest (matched_nodes, following) pair for *nodes*,
    or ``(None, None)`` when nothing matches."""
    candidates = self._find_all_matches(nodes, graph)
    if candidates:
        # Longest match wins; ties keep the earliest candidate (max is stable).
        return max(candidates, key=lambda candidate: len(candidate[0]))
    return None, None
Expand Down Expand Up @@ -106,7 +115,7 @@ def _match(self, nodes, graph):
if self.greedy_consume:
if not all_matches:
return None
return max(all_matches, key=lambda x: len(x[0]))
return all_matches
return None

def __or__(self, other):
Expand Down Expand Up @@ -215,21 +224,72 @@ def get_edge_boundaries(match: List[str], graph: nx.DiGraph):
return in_edge_boundary, out_edge_boundary


def search_all(graph: nx.DiGraph, expression: Expression) -> List[List[str]]:
    """
    Return the list of node-key lists that match *expression*.

    Each weakly connected component of *graph* is scanned in topological
    order; nodes consumed by an earlier match are never re-matched, so the
    returned matches are pairwise disjoint.

    :param graph: The model graph.
    :param expression: A pattern expression describing the subgraphs to find.
    :return: A list of matched node-key sequences.
    """
    matches = []
    matched_nodes = set()
    weakly_subgraphs = [graph.subgraph(c) for c in nx.weakly_connected_components(graph)]
    for subgraph in weakly_subgraphs:
        for node in nx.topological_sort(subgraph):
            # Skip already-consumed nodes BEFORE running the (potentially
            # expensive) expression matching — the original computed the
            # match first and discarded it.
            if node in matched_nodes:
                continue
            match, _ = expression.match([node], graph)
            if match:
                matched_nodes.update(match)
                matches.append(match)
    return matches
def find_whether_subgraph_has_inner_outgoing_edges(graph: nx.DiGraph, subgraph: List[str]) -> bool:
    """
    Check whether the subgraph has edges that break its linear pattern:
    outgoing edges starting from any node except the last one, or incoming
    edges arriving at any node except the first one.
    Example (conv2d + BN + ReLU pattern):
        ...
         |
      (conv2d)
         |------\
        (BN)    |
         |      |
       (RELU)   |
         |      |
       (cat)----/
         |
        ...
    :param graph: The model graph.
    :param subgraph: A subgraph of the model graph, as an ordered list of node keys.
    :return: True if the subgraph has an outgoing edge from a non-last node
        or an incoming edge into a non-first node, False otherwise.
    """
    # Breaking output edges: a non-last node feeds a node outside the subgraph.
    if any(successor_key not in subgraph
           for node_key in subgraph[:-1]
           for successor_key in graph.succ[node_key]):
        return True
    # Breaking input edges: a non-first node is fed by a node outside the subgraph.
    return any(predecessor_key not in subgraph
               for node_key in subgraph[1:]
               for predecessor_key in graph.pred[node_key])


def find_subgraphs_match_expression(graph: nx.DiGraph, expression: Expression) -> List[List[str]]:
    """
    Find a list of subgraphs for the particular graph that match the pattern expression.

    Nodes are visited in topological order; for each unconsumed node the
    longest match whose node set forms a linear subgraph (no edges crossing
    its boundary except via the first/last node) is taken, and its nodes are
    excluded from further matching.

    :param graph: The model graph.
    :param expression: A pattern expression containing a logic of layer fusing.
    :return: A list of subgraphs for the particular graph, matching the pattern expression.
    """
    subgraphs = []
    subgraphs_nodes = set()
    for node in nx.topological_sort(graph):
        # If a node has already been added to any pattern, skip it.
        if node in subgraphs_nodes:
            continue

        all_matches = sorted(expression.all_matches([node], graph),
                             key=lambda x: len(x[0]), reverse=True)

        # Longest-first scan: the first valid candidate is the longest one.
        longest_valid_match = next(
            (match for match in all_matches
             if not find_whether_subgraph_has_inner_outgoing_edges(graph, match[0])),
            None)
        # If no valid pattern was found, skip this node.
        if longest_valid_match is None:
            continue

        subgraphs_nodes.update(longest_valid_match[0])
        subgraphs.append(longest_valid_match[0])
    return subgraphs
42 changes: 2 additions & 40 deletions nncf/torch/nncf_network.py
Original file line number Diff line number Diff line change
Expand Up @@ -206,21 +206,6 @@ def __init__(self, nncf_graph: NNCFGraph):
if ia_op_exec_context.operator_name == MODEL_INPUT_OP_NAME:
self._input_ips.append(post_hook_insertion_point)

def _base_graph_match_has_breaking_edges(self, match):
    """Return True if the matched node sequence has edges crossing its
    boundary anywhere other than the first node's inputs or the last
    node's outputs."""
    base_graph = self._base_nx_graph
    # An edge leaving any node but the last breaks the pattern.
    if any(successor not in match
           for node_key in match[:-1]
           for successor in base_graph.succ[node_key]):
        return True
    # An edge entering any node but the first breaks the pattern too.
    return any(predecessor not in match
               for node_key in match[1:]
               for predecessor in base_graph.pred[node_key])

def get_ip_graph_with_merged_hw_optimized_operations(self,
hw_config: Optional[HWConfig] = None,
Expand All @@ -229,38 +214,15 @@ def get_ip_graph_with_merged_hw_optimized_operations(self,
# pylint:disable=too-many-branches
merged_ip_graph = deepcopy(self)
pattern = self._get_mergeable_operator_patterns(hw_config, additional_patterns)
from nncf.torch.graph.graph_matching import search_all
matches = search_all(self._base_nx_graph, pattern)
from nncf.torch.graph.graph_matching import find_subgraphs_match_expression
matches = find_subgraphs_match_expression(self._base_nx_graph, pattern)
for match in matches:
if len(match) == 1:
continue

input_node_key = match[0]
output_node_key = match[-1]

# If a subgraph has output edges in its middle, should skip merging it
# Example (conv2d + BN + relu pattern):
# (conv2d)
# |------\
# (BN) |
# | |
# (RELU) |
# | |
# (cat)----/
# |
# ...

# Same for input edges (linear + add pattern):
# (linear) (linear)
# | |
# \----(add)---/
# |
# ...
has_breaking_output_edges = self._base_graph_match_has_breaking_edges(match)

if has_breaking_output_edges:
continue

in_edges = list(self.in_edges(input_node_key))
out_edges = list(self.out_edges(output_node_key))

Expand Down
4 changes: 2 additions & 2 deletions nncf/torch/quantization/metrics.py
Original file line number Diff line number Diff line change
Expand Up @@ -363,11 +363,11 @@ def _marking_edges(self, graph, node_key, queue, mark=True):

def get_merged_original_graph_with_patterns(self, original_graph: PTNNCFGraph):
import nncf.torch.graph.patterns as p
from nncf.torch.graph.graph_matching import search_all
from nncf.torch.graph.graph_matching import find_subgraphs_match_expression

pattern = p.LINEAR_OPS + p.ANY_BN_ACT_COMBO | p.LINEAR_OPS + p.ELTWISE_UNIFORM_OPS
# pylint: disable=protected-access
matches = search_all(original_graph._nx_graph, pattern)
matches = find_subgraphs_match_expression(original_graph._nx_graph, pattern)
merged_graph = deepcopy(original_graph._nx_graph)
nx.set_node_attributes(merged_graph, False, self.IS_MERGED_GRAPH_ATTR)
for match in matches:
Expand Down
Original file line number Diff line number Diff line change
@@ -1,38 +1,32 @@
strict digraph {
"0 /A_0" [associated_ip_node_keys="{'POST HOOK 0 /A_0'}", node_type="InsertionPointGraphNodeType.OPERATOR", regular_node_data="0 /A_0"];
"1 /conv2d_0" [associated_ip_node_keys="{'PRE HOOK 0 1 /conv2d_0', 'POST HOOK 1 /conv2d_0'}", node_type="InsertionPointGraphNodeType.OPERATOR", regular_node_data="1 /conv2d_0"];
"2 /batch_norm_0" [associated_ip_node_keys="{'PRE HOOK 0 2 /batch_norm_0', 'POST HOOK 2 /batch_norm_0'}", node_type="InsertionPointGraphNodeType.OPERATOR", regular_node_data="2 /batch_norm_0"];
"3 /RELU_0" [associated_ip_node_keys="{'POST HOOK 3 /RELU_0', 'PRE HOOK 0 3 /RELU_0'}", node_type="InsertionPointGraphNodeType.OPERATOR", regular_node_data="3 /RELU_0"];
"4 /C_0" [associated_ip_node_keys="{'PRE HOOK 1 4 /C_0', 'PRE HOOK 0 4 /C_0', 'POST HOOK 4 /C_0'}", node_type="InsertionPointGraphNodeType.OPERATOR", regular_node_data="4 /C_0"];
"1 /conv2d_0" [associated_ip_node_keys="{'POST HOOK 1 /conv2d_0', 'PRE HOOK 0 1 /conv2d_0'}", node_type="InsertionPointGraphNodeType.OPERATOR", regular_node_data="1 /conv2d_0"];
"4 /C_0" [associated_ip_node_keys="{'PRE HOOK 1 4 /C_0', 'POST HOOK 4 /C_0', 'PRE HOOK 0 4 /C_0'}", node_type="InsertionPointGraphNodeType.OPERATOR", regular_node_data="4 /C_0"];
"5 /B_0" [associated_ip_node_keys="{'PRE HOOK 0 5 /B_0', 'POST HOOK 5 /B_0'}", node_type="InsertionPointGraphNodeType.OPERATOR", regular_node_data="5 /B_0"];
"POST HOOK 0 /A_0" [insertion_point_data="TargetType.OPERATOR_POST_HOOK /A_0", node_type="InsertionPointGraphNodeType.INSERTION_POINT"];
"PRE HOOK 0 1 /conv2d_0" [insertion_point_data="TargetType.OPERATOR_PRE_HOOK 0 /conv2d_0", node_type="InsertionPointGraphNodeType.INSERTION_POINT"];
"POST HOOK 1 /conv2d_0" [insertion_point_data="TargetType.OPERATOR_POST_HOOK /conv2d_0", node_type="InsertionPointGraphNodeType.INSERTION_POINT"];
"PRE HOOK 0 2 /batch_norm_0" [insertion_point_data="TargetType.OPERATOR_PRE_HOOK 0 /batch_norm_0", node_type="InsertionPointGraphNodeType.INSERTION_POINT"];
"POST HOOK 2 /batch_norm_0" [insertion_point_data="TargetType.OPERATOR_POST_HOOK /batch_norm_0", node_type="InsertionPointGraphNodeType.INSERTION_POINT"];
"PRE HOOK 0 3 /RELU_0" [insertion_point_data="TargetType.OPERATOR_PRE_HOOK 0 /RELU_0", node_type="InsertionPointGraphNodeType.INSERTION_POINT"];
"POST HOOK 3 /RELU_0" [insertion_point_data="TargetType.OPERATOR_POST_HOOK /RELU_0", node_type="InsertionPointGraphNodeType.INSERTION_POINT"];
"PRE HOOK 0 4 /C_0" [insertion_point_data="TargetType.OPERATOR_PRE_HOOK 0 /C_0", node_type="InsertionPointGraphNodeType.INSERTION_POINT"];
"PRE HOOK 1 4 /C_0" [insertion_point_data="TargetType.OPERATOR_PRE_HOOK 1 /C_0", node_type="InsertionPointGraphNodeType.INSERTION_POINT"];
"POST HOOK 4 /C_0" [insertion_point_data="TargetType.OPERATOR_POST_HOOK /C_0", node_type="InsertionPointGraphNodeType.INSERTION_POINT"];
"PRE HOOK 0 5 /B_0" [insertion_point_data="TargetType.OPERATOR_PRE_HOOK 0 /B_0", node_type="InsertionPointGraphNodeType.INSERTION_POINT"];
"POST HOOK 5 /B_0" [insertion_point_data="TargetType.OPERATOR_POST_HOOK /B_0", node_type="InsertionPointGraphNodeType.INSERTION_POINT"];
"2 /batch_norm_0\n3 /RELU_0\n" [associated_ip_node_keys="{'POST HOOK 3 /RELU_0', 'PRE HOOK 0 2 /batch_norm_0'}", node_type="InsertionPointGraphNodeType.OPERATOR", regular_node_data="2 /batch_norm_0"];
"0 /A_0" -> "POST HOOK 0 /A_0";
"1 /conv2d_0" -> "POST HOOK 1 /conv2d_0";
"2 /batch_norm_0" -> "POST HOOK 2 /batch_norm_0";
"3 /RELU_0" -> "POST HOOK 3 /RELU_0";
"4 /C_0" -> "POST HOOK 4 /C_0";
"5 /B_0" -> "POST HOOK 5 /B_0";
"POST HOOK 0 /A_0" -> "PRE HOOK 0 1 /conv2d_0";
"PRE HOOK 0 1 /conv2d_0" -> "1 /conv2d_0";
"POST HOOK 1 /conv2d_0" -> "PRE HOOK 0 2 /batch_norm_0";
"POST HOOK 1 /conv2d_0" -> "PRE HOOK 0 4 /C_0";
"PRE HOOK 0 2 /batch_norm_0" -> "2 /batch_norm_0";
"POST HOOK 2 /batch_norm_0" -> "PRE HOOK 0 3 /RELU_0";
"PRE HOOK 0 3 /RELU_0" -> "3 /RELU_0";
"PRE HOOK 0 2 /batch_norm_0" -> "2 /batch_norm_0\n3 /RELU_0\n";
"POST HOOK 3 /RELU_0" -> "PRE HOOK 1 4 /C_0";
"PRE HOOK 0 4 /C_0" -> "4 /C_0";
"PRE HOOK 1 4 /C_0" -> "4 /C_0";
"POST HOOK 4 /C_0" -> "PRE HOOK 0 5 /B_0";
"PRE HOOK 0 5 /B_0" -> "5 /B_0";
"2 /batch_norm_0\n3 /RELU_0\n" -> "POST HOOK 3 /RELU_0";
}
Original file line number Diff line number Diff line change
Expand Up @@ -5,13 +5,13 @@ strict digraph {
"3 DenseNet/NNCFConv2d[conv1]/conv2d_0" [id=3, scope="DenseNet/NNCFConv2d[conv1]", type=conv2d];
"4 DenseNet/NNCFConv2d[conv1]/AsymmetricQuantizer/asymmetric_quantize_0" [id=4, scope="DenseNet/NNCFConv2d[conv1]/AsymmetricQuantizer", type=asymmetric_quantize];
"5 DenseNet/Sequential[dense1]/Bottleneck[0]/BatchNorm2d[bn1]/batch_norm_0" [id=5, scope="DenseNet/Sequential[dense1]/Bottleneck[0]/BatchNorm2d[bn1]", type=batch_norm];
"6 DenseNet/Sequential[dense1]/Bottleneck[0]/BatchNorm2d[bn1]/AsymmetricQuantizer/asymmetric_quantize_0" [id=6, scope="DenseNet/Sequential[dense1]/Bottleneck[0]/BatchNorm2d[bn1]/AsymmetricQuantizer", type=asymmetric_quantize];
"7 DenseNet/Sequential[dense1]/Bottleneck[0]/RELU_0" [id=7, scope="DenseNet/Sequential[dense1]/Bottleneck[0]", type=RELU];
"6 DenseNet/Sequential[dense1]/Bottleneck[0]/RELU_0" [id=6, scope="DenseNet/Sequential[dense1]/Bottleneck[0]", type=RELU];
"7 DenseNet/Sequential[dense1]/Bottleneck[0]/AsymmetricQuantizer/asymmetric_quantize_0" [id=7, scope="DenseNet/Sequential[dense1]/Bottleneck[0]/AsymmetricQuantizer", type=asymmetric_quantize];
"8 DenseNet/Sequential[dense1]/Bottleneck[0]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/AsymmetricQuantizer[op]/asymmetric_quantize_0" [id=8, scope="DenseNet/Sequential[dense1]/Bottleneck[0]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/AsymmetricQuantizer[op]", type=asymmetric_quantize];
"9 DenseNet/Sequential[dense1]/Bottleneck[0]/NNCFConv2d[conv1]/conv2d_0" [id=9, scope="DenseNet/Sequential[dense1]/Bottleneck[0]/NNCFConv2d[conv1]", type=conv2d];
"10 DenseNet/Sequential[dense1]/Bottleneck[0]/BatchNorm2d[bn2]/batch_norm_0" [id=10, scope="DenseNet/Sequential[dense1]/Bottleneck[0]/BatchNorm2d[bn2]", type=batch_norm];
"11 DenseNet/Sequential[dense1]/Bottleneck[0]/RELU_1" [id=11, scope="DenseNet/Sequential[dense1]/Bottleneck[0]", type=RELU];
"12 DenseNet/Sequential[dense1]/Bottleneck[0]/AsymmetricQuantizer/asymmetric_quantize_0" [id=12, scope="DenseNet/Sequential[dense1]/Bottleneck[0]/AsymmetricQuantizer", type=asymmetric_quantize];
"12 DenseNet/Sequential[dense1]/Bottleneck[0]/AsymmetricQuantizer/asymmetric_quantize_1" [id=12, scope="DenseNet/Sequential[dense1]/Bottleneck[0]/AsymmetricQuantizer", type=asymmetric_quantize];
"13 DenseNet/Sequential[dense1]/Bottleneck[0]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/AsymmetricQuantizer[op]/asymmetric_quantize_0" [id=13, scope="DenseNet/Sequential[dense1]/Bottleneck[0]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/AsymmetricQuantizer[op]", type=asymmetric_quantize];
"14 DenseNet/Sequential[dense1]/Bottleneck[0]/NNCFConv2d[conv2]/conv2d_0" [id=14, scope="DenseNet/Sequential[dense1]/Bottleneck[0]/NNCFConv2d[conv2]", type=conv2d];
"15 DenseNet/Sequential[dense1]/Bottleneck[0]/NNCFConv2d[conv2]/AsymmetricQuantizer/asymmetric_quantize_0" [id=15, scope="DenseNet/Sequential[dense1]/Bottleneck[0]/NNCFConv2d[conv2]/AsymmetricQuantizer", type=asymmetric_quantize];
Expand Down Expand Up @@ -738,15 +738,15 @@ strict digraph {
"2 DenseNet/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/AsymmetricQuantizer[op]/asymmetric_quantize_0" -> "3 DenseNet/NNCFConv2d[conv1]/conv2d_0";
"3 DenseNet/NNCFConv2d[conv1]/conv2d_0" -> "4 DenseNet/NNCFConv2d[conv1]/AsymmetricQuantizer/asymmetric_quantize_0";
"4 DenseNet/NNCFConv2d[conv1]/AsymmetricQuantizer/asymmetric_quantize_0" -> "5 DenseNet/Sequential[dense1]/Bottleneck[0]/BatchNorm2d[bn1]/batch_norm_0";
"5 DenseNet/Sequential[dense1]/Bottleneck[0]/BatchNorm2d[bn1]/batch_norm_0" -> "6 DenseNet/Sequential[dense1]/Bottleneck[0]/BatchNorm2d[bn1]/AsymmetricQuantizer/asymmetric_quantize_0";
"6 DenseNet/Sequential[dense1]/Bottleneck[0]/BatchNorm2d[bn1]/AsymmetricQuantizer/asymmetric_quantize_0" -> "7 DenseNet/Sequential[dense1]/Bottleneck[0]/RELU_0";
"7 DenseNet/Sequential[dense1]/Bottleneck[0]/RELU_0" -> "9 DenseNet/Sequential[dense1]/Bottleneck[0]/NNCFConv2d[conv1]/conv2d_0";
"5 DenseNet/Sequential[dense1]/Bottleneck[0]/BatchNorm2d[bn1]/batch_norm_0" -> "6 DenseNet/Sequential[dense1]/Bottleneck[0]/RELU_0";
"6 DenseNet/Sequential[dense1]/Bottleneck[0]/RELU_0" -> "7 DenseNet/Sequential[dense1]/Bottleneck[0]/AsymmetricQuantizer/asymmetric_quantize_0";
"7 DenseNet/Sequential[dense1]/Bottleneck[0]/AsymmetricQuantizer/asymmetric_quantize_0" -> "9 DenseNet/Sequential[dense1]/Bottleneck[0]/NNCFConv2d[conv1]/conv2d_0";
"8 DenseNet/Sequential[dense1]/Bottleneck[0]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/AsymmetricQuantizer[op]/asymmetric_quantize_0" -> "9 DenseNet/Sequential[dense1]/Bottleneck[0]/NNCFConv2d[conv1]/conv2d_0";
"4 DenseNet/NNCFConv2d[conv1]/AsymmetricQuantizer/asymmetric_quantize_0" -> "16 DenseNet/Sequential[dense1]/Bottleneck[0]/cat_0";
"9 DenseNet/Sequential[dense1]/Bottleneck[0]/NNCFConv2d[conv1]/conv2d_0" -> "10 DenseNet/Sequential[dense1]/Bottleneck[0]/BatchNorm2d[bn2]/batch_norm_0";
"10 DenseNet/Sequential[dense1]/Bottleneck[0]/BatchNorm2d[bn2]/batch_norm_0" -> "11 DenseNet/Sequential[dense1]/Bottleneck[0]/RELU_1";
"11 DenseNet/Sequential[dense1]/Bottleneck[0]/RELU_1" -> "12 DenseNet/Sequential[dense1]/Bottleneck[0]/AsymmetricQuantizer/asymmetric_quantize_0";
"12 DenseNet/Sequential[dense1]/Bottleneck[0]/AsymmetricQuantizer/asymmetric_quantize_0" -> "14 DenseNet/Sequential[dense1]/Bottleneck[0]/NNCFConv2d[conv2]/conv2d_0";
"11 DenseNet/Sequential[dense1]/Bottleneck[0]/RELU_1" -> "12 DenseNet/Sequential[dense1]/Bottleneck[0]/AsymmetricQuantizer/asymmetric_quantize_1";
"12 DenseNet/Sequential[dense1]/Bottleneck[0]/AsymmetricQuantizer/asymmetric_quantize_1" -> "14 DenseNet/Sequential[dense1]/Bottleneck[0]/NNCFConv2d[conv2]/conv2d_0";
"13 DenseNet/Sequential[dense1]/Bottleneck[0]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/AsymmetricQuantizer[op]/asymmetric_quantize_0" -> "14 DenseNet/Sequential[dense1]/Bottleneck[0]/NNCFConv2d[conv2]/conv2d_0";
"14 DenseNet/Sequential[dense1]/Bottleneck[0]/NNCFConv2d[conv2]/conv2d_0" -> "15 DenseNet/Sequential[dense1]/Bottleneck[0]/NNCFConv2d[conv2]/AsymmetricQuantizer/asymmetric_quantize_0";
"15 DenseNet/Sequential[dense1]/Bottleneck[0]/NNCFConv2d[conv2]/AsymmetricQuantizer/asymmetric_quantize_0" -> "16 DenseNet/Sequential[dense1]/Bottleneck[0]/cat_0";
Expand Down
Loading

0 comments on commit 139c390

Please sign in to comment.