tf.while_loop() in call() of custom layer in TensorFlow 2

I want to write a custom layer that applies a dense layer and then applies some specified functions to the outputs of that computation. I'd like to specify the functions applied to the individual outputs in a list, so that I can change them easily.

I'm trying to apply the functions inside a tf.while_loop, but I don't know how to access and write to the individual elements of dense_output_nodes.

dense_output_nodes[i] = ... doesn't work, because it gives me

TypeError: 'Tensor' object does not support item assignment

So I then tried tf.unstack first, which is the code below, but now when I create the layer with hidden_1 = ArithmeticLayer(unit_types=['id', 'sin', 'cos'])(inputs), I get the error

TypeError: list indices must be integers or slices, not Tensor

because apparently TensorFlow converts the loop counter i from a tf.constant into a tf.Tensor.

So far I'm really struggling to find a way around this. Is there any way to make this work? Or should I build the whole ArithmeticLayer as a combination of a Dense layer and Lambda layers that apply the custom functions?

class ArithmeticLayer(layers.Layer):
    # u = number of units
    
    def __init__(self, name=None, regularizer=None, unit_types=['id', 'sin', 'cos']):
        self.regularizer=regularizer
        super().__init__(name=name)
        self.u = len(unit_types)
        self.u_types = unit_types

    def build(self, input_shape):
        self.w = self.add_weight(shape=(input_shape[-1], self.u),
                                 initializer='random_normal',
                                 regularizer=self.regularizer,
                                 trainable=True)
        self.b = self.add_weight(shape=(self.u,),
                                 initializer='random_normal',
                                 regularizer=self.regularizer,
                                 trainable=True)


    def call(self, inputs):
        # get the output nodes of the dense layer as a list
        dense_output_nodes = tf.matmul(inputs, self.w) + self.b
        dense_output_list = tf.unstack(dense_output_nodes, axis=1)
        
        # apply the function units
        i = tf.constant(0)
        def c(i):
            return tf.less(i, self.u)
        def b(i):
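            # NOTE: indexing the Python list with the Tensor `i` below is what raises
            # "TypeError: list indices must be integers or slices, not Tensor"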
            dense_output_list[i] = tf.cond(self.u_types[i] == 'sin',
                                            lambda: tf.math.sin(dense_output_list[i]),
                                            lambda: dense_output_list[i]
                                           )
            dense_output_list[i] = tf.cond(self.u_types[i] == 'cos',
                                            lambda: tf.math.cos(dense_output_list[i]),
                                            lambda: dense_output_list[i]
                                           )
            return (tf.add(i, 1), )
        [i] = tf.while_loop(c, b, [i])
        
        final_output_nodes = tf.stack(dense_output_list, axis=1)
        return final_output_nodes
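
For reference, the Dense-plus-Lambda combination I mentioned would look roughly like this. This is only a sketch I have not tested (make_arithmetic_block and fn_map are illustrative names), but it shows the structure I would otherwise fall back to:

import tensorflow as tf
from tensorflow.keras import layers

def make_arithmetic_block(unit_types=['id', 'sin', 'cos']):
    # map each unit type to the function applied to its column
    fn_map = {'id': tf.identity, 'sin': tf.math.sin, 'cos': tf.math.cos}
    def block(x):
        dense_out = layers.Dense(len(unit_types))(x)
        # slice out one column per unit and apply its function via a Lambda layer
        cols = [layers.Lambda(lambda t, f=fn_map[u]: f(t))(dense_out[:, i:i+1])
                for i, u in enumerate(unit_types)]
        return layers.Concatenate(axis=1)(cols)
    return block

inputs = tf.keras.Input((3,))
hidden_1 = make_arithmetic_block(['id', 'sin', 'cos'])(inputs)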

Any advice is appreciated!

If you're willing to use a different data structure, give this a try.

import tensorflow as tf

u_types = ["sin", "cos"]
u = len(u_types)
u_types_ta = tf.TensorArray(dtype=tf.string, size=1, dynamic_size=True, clear_after_read=False)
for idx in range(0, len(u_types)):
    u_types_ta = u_types_ta.write(idx, u_types[idx])

dense_output_list = [1., 2.]
dense_output_ta = tf.TensorArray(dtype=tf.float32, size=1, dynamic_size=True, clear_after_read=False)
for idx in range(0, len(dense_output_list)):
    dense_output_ta = dense_output_ta.write(idx, dense_output_list[idx])

# result TensorArray that the loop writes into
ta = tf.TensorArray(dtype=tf.float32, size=1, dynamic_size=True, clear_after_read=False)


def c(i, _):
    return tf.less(i, u)


def b(i, ta):
    # read the element once, apply the matching function, then write the result back once
    val = dense_output_ta.read(i)
    val = tf.cond(u_types_ta.read(i) == 'sin',
                  lambda: tf.math.sin(val),
                  lambda: val)
    val = tf.cond(u_types_ta.read(i) == 'cos',
                  lambda: tf.math.cos(val),
                  lambda: val)
    # write() returns the updated TensorArray, so keep the return value
    ta = ta.write(i, val)
    return (tf.add(i, 1), ta)


i = tf.constant(0)
i, ta = tf.while_loop(c, b, [i, ta])
print(ta.stack())
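
To connect this back to the layer, here is a rough sketch of how the same TensorArray pattern could sit inside call(). It is untested and assumes self.u, self.u_types, self.w and self.b are set up exactly as in your ArithmeticLayer:

    def call(self, inputs):
        dense_output_nodes = tf.matmul(inputs, self.w) + self.b      # shape (batch, u)

        # copy the unit types into a string TensorArray once per call
        u_types_ta = tf.TensorArray(dtype=tf.string, size=self.u, clear_after_read=False)
        for idx, t in enumerate(self.u_types):
            u_types_ta = u_types_ta.write(idx, t)

        out_ta = tf.TensorArray(dtype=tf.float32, size=self.u, clear_after_read=False)

        def cond(i, _):
            return tf.less(i, self.u)

        def body(i, ta):
            col = dense_output_nodes[:, i]                           # i-th output column
            col = tf.cond(u_types_ta.read(i) == 'sin', lambda: tf.math.sin(col), lambda: col)
            col = tf.cond(u_types_ta.read(i) == 'cos', lambda: tf.math.cos(col), lambda: col)
            return tf.add(i, 1), ta.write(i, col)

        _, out_ta = tf.while_loop(cond, body, [tf.constant(0), out_ta])
        return tf.transpose(out_ta.stack())                          # (u, batch) -> (batch, u)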

If you want to apply certain functions column-wise across the samples of a batch, tf.tensor_scatter_nd_update should do the trick. Here's an example that works both in eager execution and in graph mode:

import tensorflow as tf

class ArithmeticLayer(tf.keras.layers.Layer):
    # u = number of units
    
    def __init__(self, name=None, regularizer=None, unit_types=['id', 'sin', 'cos']):
        self.regularizer=regularizer
        super().__init__(name=name)
        self.u_types = tf.constant(unit_types)
        self.u_shape = tf.shape(self.u_types)

    def build(self, input_shape):
        self.w = self.add_weight(shape=(input_shape[-1], self.u_shape[0]),
                                 initializer='random_normal',
                                 regularizer=self.regularizer,
                                 trainable=True)
        self.b = self.add_weight(shape=(self.u_shape[0],),
                                 initializer='random_normal',
                                 regularizer=self.regularizer,
                                 trainable=True)

    def call(self, inputs):
        dense_output_nodes = tf.matmul(inputs, self.w) + self.b
        d_shape = tf.shape(dense_output_nodes)
        i = tf.constant(0)
        c = lambda i, d: tf.less(i, self.u_shape[0])

        def b(i, d):
          # the scatter indices select column i of every row in the batch
          d = tf.cond(self.u_types[i] == 'sin', 
                lambda: tf.tensor_scatter_nd_update(d, tf.stack([tf.range(d_shape[0]), tf.repeat([i], d_shape[0])], axis=1), tf.math.sin(d[:, i])), 
                lambda: d)
          d = tf.cond(self.u_types[i] == 'cos', 
                lambda: tf.tensor_scatter_nd_update(d, tf.stack([tf.range(d_shape[0]), tf.repeat([i], d_shape[0])], axis=1), tf.math.cos(d[:, i])), 
                lambda: d)
          return tf.add(i, 1), d
        _, dense_output_nodes = tf.while_loop(c, b, loop_vars=[i, dense_output_nodes])

        return dense_output_nodes

x = tf.random.normal((4, 3))
inputs = tf.keras.layers.Input((3,))
arithmetic = ArithmeticLayer()
outputs = arithmetic(inputs)
model = tf.keras.Model(inputs, outputs)
model.compile(optimizer='adam', loss='mse')
model.fit(x, tf.random.normal((4, 3)), batch_size=2)

2/2 [==============================] - 3s 11ms/step - loss: 1.4259
<keras.callbacks.History at 0x7fe50728c850>
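
For reference, the column-wise update inside b() boils down to the following pattern. This is a minimal standalone sketch of tf.tensor_scatter_nd_update with toy values, just to show what the indices mean:

import tensorflow as tf

d = tf.constant([[1., 2., 3.],
                 [4., 5., 6.]])                      # (batch, units)
i = tf.constant(1)                                   # column to transform
batch = tf.shape(d)[0]

# one (row, column) index pair per sample, all pointing at column i
indices = tf.stack([tf.range(batch), tf.repeat([i], batch)], axis=1)
updated = tf.tensor_scatter_nd_update(d, indices, tf.math.sin(d[:, i]))
print(updated)                                       # column 1 replaced by sin(column 1)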