Tensorflow - TypeError when using tf.cond()
Below is the code snippet I am using with Tensorflow 2.6.0:
import tensorflow as tf

x = tf.keras.Input(shape=(224, 224, 3), batch_size=None)
x1 = tf.keras.Input(1, dtype=tf.int32)
y = tf.cond(tf.less(x1, 5),
            lambda: tf.keras.layers.ReLU()(x),
            lambda: tf.keras.layers.LeakyReLU(alpha=0.1)(x))
model = tf.keras.models.Model(inputs=[x, x1], outputs=[y])
model.summary()
Error:
python3.7/site-packages/tensorflow/python/framework/func_graph.py", line 969, in convert
    (str(python_func), type(x)))
TypeError: To be compatible with tf.eager.defun, Python functions must return zero or more Tensors; in compilation of <function at 0x7fe4743fde60>, found return value of type <class 'keras.engine.keras_tensor.KerasTensor'>, which is not a Tensor.
The problem is that tf.cond does not work with KerasTensors (the tensors returned by Keras layers). You could try wrapping tf.cond in a custom layer:
import tensorflow as tf

class ConditionalActivationLayer(tf.keras.layers.Layer):
    def call(self, inputs):
        # Inside call() the inputs are plain tensors, not KerasTensors,
        # so tf.cond can be used directly.
        x1, x = inputs[0], inputs[1]
        return tf.cond(tf.less(x1, 5),
                       lambda: tf.nn.relu(x),
                       lambda: tf.nn.leaky_relu(x, alpha=0.1))

x = tf.keras.Input(shape=(224, 224, 3), batch_size=None)
x1 = tf.keras.Input(1, dtype=tf.int32)
y = ConditionalActivationLayer()([x1, x])
model = tf.keras.models.Model(inputs=[x, x1], outputs=[y])
model.summary()
Model: "model"
__________________________________________________________________________________________________
Layer (type) Output Shape Param # Connected to
==================================================================================================
input_6 (InputLayer) [(None, 1)] 0 []
input_5 (InputLayer) [(None, 224, 224, 3 0 []
)]
conditional_activation_layer ( (None, 224, 224, 3) 0 ['input_6[0][0]',
ConditionalActivationLayer) 'input_5[0][0]']
==================================================================================================
Total params: 0
Trainable params: 0
Non-trainable params: 0
__________________________________________________________________________________________________
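For a quick sanity check outside the functional model, the layer can also be called directly on concrete eager tensors; the values below are made up purely for illustration:

layer = ConditionalActivationLayer()
images = tf.random.normal([2, 224, 224, 3])   # dummy batch of "images"
print(layer([tf.constant(3), images]).shape)  # 3 < 5  -> ReLU branch
print(layer([tf.constant(7), images]).shape)  # 7 >= 5 -> LeakyReLU branch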
This also works:
return tf.cond(tf.less(x1, 5), lambda: tf.keras.layers.ReLU()(x), lambda: tf.keras.layers.LeakyReLU(alpha=0.1)(x))
It is just a matter of taste; a minimal sketch of that variant follows below.
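A minimal sketch of that variant, reusing the class structure from above (the name ConditionalActivationLayerV2 is only an illustrative choice):

import tensorflow as tf

class ConditionalActivationLayerV2(tf.keras.layers.Layer):
    def call(self, inputs):
        x1, x = inputs[0], inputs[1]
        # Same logic as before, but using the Keras activation layers
        # instead of the raw tf.nn ops.
        return tf.cond(tf.less(x1, 5),
                       lambda: tf.keras.layers.ReLU()(x),
                       lambda: tf.keras.layers.LeakyReLU(alpha=0.1)(x))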
You can also disable eager execution, and it should work, because the input layers are then plain tensors:
import tensorflow as tf

tf.compat.v1.disable_eager_execution()

x = tf.keras.Input(shape=(224, 224, 3), batch_size=None)
x1 = tf.keras.Input(1, dtype=tf.int32)
print(type(x1), type(x))  # <class 'tensorflow.python.framework.ops.Tensor'> <class 'tensorflow.python.framework.ops.Tensor'>
y = tf.cond(tf.less(x1, 5),
            lambda: tf.keras.layers.ReLU()(x),
            lambda: tf.keras.layers.LeakyReLU(alpha=0.1)(x))
model = tf.keras.models.Model(inputs=[x, x1], outputs=[y])
model.summary()