From 5f61e578aa2aaff363dc024c79047bae6fdfdec7 Mon Sep 17 00:00:00 2001
From: chahalinder0007
Date: Sat, 16 Nov 2019 19:48:29 +0530
Subject: [PATCH] Fix issues #83, #84: error for missing attention_state in
 AttentionWrapperState

---
 core/sync_attention_wrapper.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/core/sync_attention_wrapper.py b/core/sync_attention_wrapper.py
index bed1ce9..9f2bdaa 100644
--- a/core/sync_attention_wrapper.py
+++ b/core/sync_attention_wrapper.py
@@ -49,7 +49,8 @@ def call(self, inputs, state):
         rnn_cell_state = state.cell_state.h
       else:
         rnn_cell_state = state.cell_state
-      attention, alignments = _compute_attention(
+
+      attention, alignments, attention_state = _compute_attention(
           attention_mechanism, rnn_cell_state, previous_alignments[i],
           self._attention_layers[i] if self._attention_layers else None)
       alignment_history = previous_alignment_history[i].write(
@@ -66,6 +67,7 @@ def call(self, inputs, state):
     next_state = seq2seq.AttentionWrapperState(
         time=state.time + 1,
+        attention_state=attention_state,
         cell_state=next_cell_state,
         attention=attention,
         alignments=self._item_or_tuple(all_alignments),