I am reimplementing a text-to-speech project based on the Deep Voice 3 paper, and I am hitting a "Function call stack: keras_scratch_graph" error in the decoder part. I am using Keras from TF 2.0 on Google Colab. Below is the code for the decoder Keras model.
import tensorflow as tf
from tensorflow import keras as K

# hp (the hyperparameter module) and the custom Attention layer come from the
# rest of the project and are omitted here.

y1 = tf.ones(shape = (16, 203, 320))

def Decoder(name = "decoder"):
    # Decoder prenet
    din = tf.concat((tf.zeros_like(y1[:, :1, -hp.mel:]), y1[:, :-1, -hp.mel:]), 1)
    keys = K.Input(shape = (180, 256), batch_size = 16, name = "keys")
    vals = K.Input(shape = (180, 256), batch_size = 16, name = "vals")
    prev_max_attentions_li = tf.ones(shape = (hp.dlayer, hp.batch_size), dtype = tf.int32)
    #prev_max_attentions_li = K.Input(tensor = prev_max_attentions_li)

    for i in range(hp.dlayer):
        dpout = K.layers.Dropout(rate = 0 if i == 0 else hp.dropout)(din)
        fc_out = K.layers.Dense(hp.char_embed, activation = 'relu')(dpout)
        print("The FC value is ", fc_out)

    # Positional encodings for the queries and keys
    query_pe = K.layers.Embedding(hp.Ty, hp.char_embed)(tf.tile(tf.expand_dims(tf.range(hp.Ty // hp.r), 0), [hp.batch_size, 1]))
    key_pe = K.layers.Embedding(hp.Tx, hp.char_embed)(tf.tile(tf.expand_dims(tf.range(hp.Tx), 0), [hp.batch_size, 1]))

    alignments_li, max_attentions_li = [], []
    for i in range(hp.dlayer):
        dpout = K.layers.Dropout(rate = 0)(fc_out)
        queries = K.layers.Conv1D(hp.datten_size, hp.dfilter, padding = 'causal', dilation_rate = 2**i)(dpout)
        fc_out = (queries + fc_out) * tf.math.sqrt(0.5)
        print("The FC value is ", fc_out)
        queries = fc_out + query_pe
        keys += key_pe
        tensor, alignments, max_attentions = Attention(name = "attention")(queries, keys, vals, prev_max_attentions_li[i])
        fc_out = (tensor + queries) * tf.math.sqrt(0.5)
        alignments_li.append(alignments)
        max_attentions_li.append(max_attentions)

    decoder_output = fc_out

    dpout = K.layers.Dropout(rate = 0)(decoder_output)
    mel_logits = K.layers.Dense(hp.mel * hp.r)(dpout)

    dpout = K.layers.Dropout(rate = 0)(fc_out)
    done_output = K.layers.Dense(2)(dpout)

    return K.Model(inputs = [keys, vals],
                   outputs = [mel_logits, done_output, decoder_output, alignments_li, max_attentions_li],
                   name = name)

decode = Decoder()
kin = tf.ones(shape = (16, 180, 256))
vin = tf.ones(shape = (16, 180, 256))
print(decode([kin, vin]))   # a multi-input model takes its inputs as a list
tf.keras.utils.plot_model(decode, to_file = "decoder.png", show_shapes = True)
When I test it with some data, it shows the error messages below. The problem seems to be with "fc_out", but I don't know how to pass the "fc_out" output from the first for loop into the second for loop. Any answer would be appreciated.
File "Decoder.py", line 60, in <module>
decode = Decoder()
File "Decoder.py", line 33, in Decoder
dpout = K.layers.Dropout(rate = 0)(fc_out)
File "/Users/ydc/dl-npm/lib/python3.7/site-packages/tensorflow/python/keras/engine/base_layer.py", line 596, in __call__
base_layer_utils.create_keras_history(inputs)
File "/Users/ydc/dl-npm/lib/python3.7/site-packages/tensorflow/python/keras/engine/base_layer_utils.py", line 199, in create_keras_history
_, created_layers = _create_keras_history_helper(tensors, set(), [])
File "/Users/ydc/dl-npm/lib/python3.7/site-packages/tensorflow/python/keras/engine/base_layer_utils.py", line 245, in _create_keras_history_helper
layer_inputs, processed_ops, created_layers)
File "/Users/ydc/dl-npm/lib/python3.7/site-packages/tensorflow/python/keras/engine/base_layer_utils.py", line 245, in _create_keras_history_helper
layer_inputs, processed_ops, created_layers)
File "/Users/ydc/dl-npm/lib/python3.7/site-packages/tensorflow/python/keras/engine/base_layer_utils.py", line 245, in _create_keras_history_helper
layer_inputs, processed_ops, created_layers)
File "/Users/ydc/dl-npm/lib/python3.7/site-packages/tensorflow/python/keras/engine/base_layer_utils.py", line 243, in _create_keras_history_helper
constants[i] = backend.function([], op_input)([])
File "/Users/ydc/dl-npm/lib/python3.7/site-packages/tensorflow/python/keras/backend.py", line 3510, in __call__
outputs = self._graph_fn(*converted_inputs)
File "/Users/ydc/dl-npm/lib/python3.7/site-packages/tensorflow/python/eager/function.py", line 572, in __call__
return self._call_flat(args)
File "/Users/ydc/dl-npm/lib/python3.7/site-packages/tensorflow/python/eager/function.py", line 671, in _call_flat
outputs = self._inference_function.call(ctx, args)
File "/Users/ydc/dl-npm/lib/python3.7/site-packages/tensorflow/python/eager/function.py", line 445, in call
ctx=ctx)
File "/Users/ydc/dl-npm/lib/python3.7/site-packages/tensorflow/python/eager/execute.py", line 67, in quick_execute
six.raise_from(core._status_to_exception(e.code, message), None)
File "<string>", line 3, in raise_from
tensorflow.python.framework.errors_impl.FailedPreconditionError: Error while reading resource variable _AnonymousVar19 from Container: localhost. This could mean that the variable was uninitialized. Not found: Resource localhost/_AnonymousVar19/N10tensorflow3VarE does not exist.
[[node dense_7/BiasAdd/ReadVariableOp (defined at Decoder.py:33) ]] [Op:__inference_keras_scratch_graph_566]
Function call stack:
keras_scratch_graph
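Judging from the traceback, the failure happens while Keras tries to rebuild layer history for fc_out in _create_keras_history_helper, which suggests that the tensors built from the y1 constant never become proper Keras symbolic tensors. The type mismatch is easy to see in isolation (a standalone sketch, with -80: standing in for -hp.mel:):

import tensorflow as tf
from tensorflow import keras as K

y1 = tf.ones(shape = (16, 203, 320))
keys = K.Input(shape = (180, 256), batch_size = 16, name = "keys")

# din is built from the tf.ones constant, so under TF 2.0 eager execution it is
# an EagerTensor; keys comes from K.Input, so it is a symbolic graph tensor.
din = tf.concat((tf.zeros_like(y1[:, :1, -80:]), y1[:, :-1, -80:]), 1)
print(type(din))    # EagerTensor
print(type(keys))   # symbolic Tensor carrying Keras metadata

Mixing these two kinds of tensors (for example in keys += key_pe or the residual additions) forces Keras to reconstruct the graph, which would explain the _AnonymousVar19 / dense_7/ReadVariableOp failure above: the Dense and Embedding layers applied to the eager tensors created their variables outside the Keras graph.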
I was getting a similar error. I reduced the batch size and the error disappeared. I don't know why, but it worked for me; I am guessing it is something related to over-stacking.
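If reducing the batch size does not make the error go away, another direction that is often suggested for this kind of FailedPreconditionError is to keep every tensor inside the Keras graph: feed the mel frames in through a K.Input instead of the tf.ones constant y1, and wrap the raw TF ops (tf.concat, the additions, the tf.math.sqrt(0.5) scaling) in K.layers.Lambda so that every intermediate result carries Keras layer history. Below is a minimal sketch of the idea, not a drop-in replacement: mel and char_embed stand in for the hp.* values, and the attention block plus the keys/vals inputs would be wired up the same way.

import tensorflow as tf
from tensorflow import keras as K

mel, char_embed = 80, 256   # stand-ins for hp.mel and hp.char_embed

# Feed the decoder targets in as a model input instead of a tf.ones constant,
# so that every downstream tensor stays symbolic.
mel_in = K.Input(shape = (203, mel), batch_size = 16, name = "mel_in")

# Decoder prenet input: shift the frames right by one step. Wrapping the raw
# TF ops in a Lambda keeps the result a Keras tensor with layer history.
din = K.layers.Lambda(
    lambda x: tf.concat((tf.zeros_like(x[:, :1, :]), x[:, :-1, :]), axis = 1),
    name = "shift_right")(mel_in)

fc_out = K.layers.Dense(char_embed, activation = 'relu')(din)

# Residual connection with the sqrt(0.5) scaling, again inside a Lambda.
queries = K.layers.Conv1D(char_embed, 5, padding = 'causal')(fc_out)
fc_out = K.layers.Lambda(
    lambda t: (t[0] + t[1]) * tf.math.sqrt(0.5),
    name = "scaled_residual")([queries, fc_out])

sketch = K.Model(inputs = mel_in, outputs = fc_out, name = "decoder_sketch")
sketch.summary()

With this pattern, create_keras_history should never have to run, because every op between the K.Input tensors and the model outputs is already a Keras layer.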