
Add the attention visualization to the textual entailment demo (#1219)
* Fixes #1033

* Changes following PR review:
  1. The predictor is now responsible for tokenizing the hypothesis and premise.
  2. The model no longer takes the metadata parameter.

* Removed some extra blank lines

* Fix spacing issues
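
For reviewers who want to poke at the new outputs outside the demo UI, here is a minimal sketch of calling the predictor directly. The archive path is a placeholder and the exact loading calls are assumptions about the AllenNLP API of this era, not part of this commit:

    from allennlp.models.archival import load_archive
    from allennlp.service.predictors.predictor import Predictor

    # Placeholder path: any trained decomposable-attention archive.
    archive = load_archive("/path/to/decomposable-attention.tar.gz")
    predictor = Predictor.from_archive(archive, "textual-entailment")

    result = predictor.predict_json({
        "premise": "Two dogs are running through a field.",
        "hypothesis": "There are animals outdoors.",
    })

    # Keys added by this change, next to the existing label_logits / label_probs:
    for key in ("premise_tokens", "hypothesis_tokens", "h2p_attention", "p2h_attention"):
        print(key, result[key])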
murphp15 authored and matt-gardner committed May 16, 2018
1 parent b71ef43 commit 10ea3b3
Showing 3 changed files with 47 additions and 5 deletions.
5 changes: 4 additions & 1 deletion allennlp/models/decomposable_attention.py
@@ -162,7 +162,10 @@ def forward(self, # type: ignore
         label_logits = self._aggregate_feedforward(aggregate_input)
         label_probs = torch.nn.functional.softmax(label_logits, dim=-1)

-        output_dict = {"label_logits": label_logits, "label_probs": label_probs}
+        output_dict = {"label_logits": label_logits,
+                       "label_probs": label_probs,
+                       "h2p_attention": h2p_attention,
+                       "p2h_attention": p2h_attention}

         if label is not None:
             loss = self._loss(label_logits, label.long().view(-1))
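
For context on the two tensors now added to output_dict: in the decomposable attention model they are the attention matrices computed from a premise/hypothesis similarity matrix, normalized with a softmax along each row (one matrix per direction). A minimal sketch of that computation, with illustrative names and without the masking of padded tokens that the real model applies:

    import torch

    def attention_matrices(premise_vecs: torch.Tensor, hypothesis_vecs: torch.Tensor):
        # premise_vecs:    (premise_length, dim)
        # hypothesis_vecs: (hypothesis_length, dim)
        similarity = premise_vecs @ hypothesis_vecs.t()  # (premise_length, hypothesis_length)
        # Each row is softmax-normalized, so every row sums to one.
        p2h_attention = torch.nn.functional.softmax(similarity, dim=-1)
        h2p_attention = torch.nn.functional.softmax(similarity.t(), dim=-1)
        return p2h_attention, h2p_attention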
11 changes: 9 additions & 2 deletions allennlp/service/predictors/decomposable_attention.py
@@ -1,6 +1,7 @@
 from typing import Tuple
-from overrides import overrides

+from overrides import overrides
+from allennlp.data.dataset_readers.snli import SnliReader
 from allennlp.common.util import JsonDict
 from allennlp.data import Instance
 from allennlp.service.predictors.predictor import Predictor
@@ -38,4 +39,10 @@ def _json_to_instance(self, json_dict: JsonDict) -> Tuple[Instance, JsonDict]:
         """
         premise_text = json_dict["premise"]
         hypothesis_text = json_dict["hypothesis"]
-        return self._dataset_reader.text_to_instance(premise_text, hypothesis_text), {}
+        snli_reader: SnliReader = self._dataset_reader # type: ignore
+        tokenizer = snli_reader._tokenizer # pylint: disable=protected-access
+
+        return self._dataset_reader.text_to_instance(premise_text, hypothesis_text), {
+                'premise_tokens': [token.text for token in tokenizer.tokenize(premise_text)],
+                'hypothesis_tokens': [token.text for token in tokenizer.tokenize(hypothesis_text)]
+        }
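
The second element of the tuple returned by _json_to_instance is extra JSON that the base Predictor merges with the model's output, which is how premise_tokens and hypothesis_tokens end up next to the attention matrices in the demo's response. Roughly, the mechanism looks like this (a paraphrase, not the exact library source):

    def predict_json(self, inputs, cuda_device=-1):
        instance, return_dict = self._json_to_instance(inputs)
        outputs = self._model.forward_on_instance(instance, cuda_device)
        return_dict.update(outputs)   # model outputs join the token lists
        return sanitize(return_dict)  # tensors become JSON-serializable lists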
36 changes: 34 additions & 2 deletions demo/src/components/TeComponent.js
@@ -2,6 +2,8 @@ import React from 'react';
 import { API_ROOT } from '../api-config';
 import { withRouter } from 'react-router-dom';
 import {PaneLeft, PaneRight} from './Pane'
+import HeatMap from './heatmap/HeatMap'
+import Collapsible from 'react-collapsible'
 import Button from './Button'
 import ModelIntro from './ModelIntro'

@@ -166,7 +168,7 @@ class TeGraph extends React.Component {

 class TeOutput extends React.Component {
   render() {
-    const { labelProbs } = this.props;
+    const { labelProbs, h2p_attention, p2h_attention, premise_tokens, hypothesis_tokens } = this.props;
     const [entailment, contradiction, neutral] = labelProbs;

     let judgment; // Valid values: "e", "c", "n"
@@ -274,6 +276,28 @@
             </table>
           </div>
         </div>
+        <div className="form__field">
+          <Collapsible trigger="Model internals (beta)">
+            <Collapsible trigger="premise to hypothesis attention">
+              <span>
+                For every premise word, the model computes an attention over the hypothesis words.
+                This heatmap shows that attention, which is normalized for every row in the matrix.
+              </span>
+              <div className="heatmap">
+                <HeatMap xLabels={premise_tokens} yLabels={hypothesis_tokens} data={h2p_attention} />
+              </div>
+            </Collapsible>
+            <Collapsible trigger="hypothesis to premise attention">
+              <span>
+                For every hypothesis word, the model computes an attention over the premise words.
+                This heatmap shows that attention, which is normalized for every row in the matrix.
+              </span>
+              <div className="heatmap">
+                <HeatMap xLabels={hypothesis_tokens} yLabels={premise_tokens} data={p2h_attention} />
+              </div>
+            </Collapsible>
+          </Collapsible>
+        </div>
       </div>
     );
   }
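
As a quick sanity check of the "normalized for every row" wording above: for a single prediction, every row of the matrices the predictor returns should sum to roughly one. Illustrative only; result refers to the predictor sketch shown earlier on this page:

    import math

    def rows_are_normalized(matrix, tol=1e-4):
        # Each row is a softmax distribution over the other sentence's tokens.
        return all(math.isclose(sum(row), 1.0, abs_tol=tol) for row in matrix)

    assert rows_are_normalized(result["h2p_attention"])
    assert rows_are_normalized(result["p2h_attention"])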
@@ -340,6 +364,10 @@ class _TeComponent extends React.Component {
     const premise = requestData && requestData.premise;
     const hypothesis = requestData && requestData.hypothesis;
     const labelProbs = responseData && responseData.label_probs;
+    const h2p_attention = responseData && responseData.h2p_attention;
+    const p2h_attention = responseData && responseData.p2h_attention;
+    const premise_tokens = responseData && responseData.premise_tokens;
+    const hypothesis_tokens = responseData && responseData.hypothesis_tokens;

     return (
       <div className="pane model">
@@ -350,7 +378,11 @@
                    hypothesis={hypothesis}/>
         </PaneLeft>
         <PaneRight outputState={this.state.outputState}>
-          <TeOutput labelProbs={labelProbs}/>
+          <TeOutput labelProbs={labelProbs}
+                    h2p_attention={h2p_attention}
+                    p2h_attention={p2h_attention}
+                    premise_tokens={premise_tokens}
+                    hypothesis_tokens={hypothesis_tokens}/>
         </PaneRight>
       </div>
     );