import tensorflow as tf

@tf.custom_gradient
def gradient_reversal(x):
    # Forward pass: identity.
    y = x
    def grad(dy):
        # Backward pass: flip the sign of the incoming gradient.
        return -dy
    return y, grad
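A quick sanity check (my own sketch, not from the original post): the forward pass is the identity, while the gradient that flows back is negated:

x = tf.Variable(2.0)
with tf.GradientTape() as tape:
    y = gradient_reversal(x)
tf.print(y)                    # 2.0  -- identity forward
tf.print(tape.gradient(y, x))  # -1.0 -- the upstream gradient (1.0), negated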
# Using it in a model:
class MyModel(tf.keras.Model):
    def __init__(self):
        super(MyModel, self).__init__()
    def call(self, x):
        # Identity forward, negated gradient backward.
        return gradient_reversal(x)
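Used as a layer, the wrapper behaves the same way; a minimal usage sketch (again my own example):

grl = MyModel()
x = tf.Variable([[1.0, 2.0]])
with tf.GradientTape() as tape:
    y = tf.reduce_sum(grl(x))
tf.print(tape.gradient(y, x))  # [[-1 -1]] -- reversed sign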
class MyModel2(tf.keras.Model):
    def __init__(self):
        super(MyModel2, self).__init__()
        self.alpha = self.add_weight(name="alpha", initializer=tf.keras.initializers.Ones())
    @tf.custom_gradient
    def forward(self, x):
        y = self.alpha * x
        def backward(w, variables=None):
            # Recompute the reversed product under an inner tape so its
            # gradient with respect to w can be taken explicitly.
            with tf.GradientTape() as tape:
                tape.watch(w)
                z = - self.alpha * w
            grads = tape.gradient(z, [w])  # == [-self.alpha]
            # Returned as (gradient for the input, gradients for the captured variables).
            return z, grads
        return y, backward
    def call(self, x):
        return self.forward(x)
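For reference, the contract documented for tf.custom_gradient is that a grad_fn which involves captured variables must return a pair: gradients with respect to the inputs, then a list of gradients with respect to those variables. A minimal sketch of that documented pattern (the names are my own):

alpha = tf.Variable(1.0, name="alpha")

@tf.custom_gradient
def scale(x):
    y = alpha * x  # alpha is captured from the enclosing scope
    def grad(dy, variables=None):
        # (gradient w.r.t. the input x, [gradient w.r.t. alpha])
        return dy * alpha, [dy * x]
    return y, grad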
Since the documentation calls the argument "dy", I wondered whether the gradient computed during backpropagation is really what gets passed into this argument at execution time (the "backward" method above).
If you use a variable from a scope outside the backward method, the following error appears unless the function accepts variables=None (see also the Args section of the documentation):
TypeError: If using @custom_gradient with a function that uses variables, then grad_fn must accept a keyword argument 'variables'.
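A minimal reproduction of that error (my own sketch): the grad_fn below closes over a variable but does not accept the variables keyword argument:

beta = tf.Variable(2.0)

@tf.custom_gradient
def bad(x):
    y = beta * x
    def grad(dy):  # missing variables=None
        return dy * beta
    return y, grad

with tf.GradientTape() as tape:
    v = tf.Variable(3.0)
    out = bad(v)  # calling (or differentiating) this raises the TypeError quoted above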
import tensorflow as tf
optimizer = tf.keras.optimizers.Adam(learning_rate=0.1)
class MyModel(tf.keras.Model):
    def __init__(self):
        super(MyModel, self).__init__()
        self.alpha = self.add_weight(name="alpha", initializer=tf.keras.initializers.Ones())
    @tf.custom_gradient
    def forward(self, x):
        y = self.alpha * x
        tf.print("forward")
        tf.print("  y: ", y)
        def backward(w, variables=None):
            # Pass the upstream gradient through scaled by alpha, and hand
            # back the captured variables list in the variable-gradient slot.
            z = self.alpha * w
            tf.print("backward")
            tf.print("  z: ", z)
            tf.print("  variables: ", variables)
            return z, variables
        return y, backward
    def call(self, x):
        return self.forward(x)
class MyModel2(tf.keras.Model):
    def __init__(self):
        super(MyModel2, self).__init__()
        self.alpha = self.add_weight(name="alpha", initializer=tf.keras.initializers.Ones())
    @tf.custom_gradient
    def forward(self, x):
        y = self.alpha * x
        tf.print("forward")
        tf.print("  y: ", y)
        def backward(w, variables=None):
            with tf.GradientTape() as tape:
                tape.watch(w)
                z = - self.alpha * w
            grads = tape.gradient(z, [w])
            tf.print("backward")
            tf.print("  z: ", z)
            tf.print("  variables: ", variables)
            tf.print("  alpha: ", self.alpha)
            tf.print("  grads: ", grads)
            return z, grads
        return y, backward
    def call(self, x):
        return self.forward(x)
for model in [MyModel(), MyModel2()]:
    print()
    print()
    print()
    print(model.name)
    for i in range(10):
        with tf.GradientTape() as tape:
            x = tf.Variable(1.0, dtype=tf.float32)
            y = model(x)
        grads = tape.gradient(y, model.trainable_variables)
        optimizer.apply_gradients(zip(grads, model.trainable_variables))
        tf.print("step")
        tf.print("  y:", y)
        tf.print("  grads:", grads)
        print()
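For completeness, gradient reversal is often packaged as a small reusable layer with a scaling factor. The class and argument names below (GradientReversal, lambda_) are my own, not from the original post:

class GradientReversal(tf.keras.layers.Layer):
    def __init__(self, lambda_=1.0, **kwargs):
        super().__init__(**kwargs)
        self.lambda_ = lambda_
    def call(self, x):
        @tf.custom_gradient
        def _reverse(inputs):
            def grad(dy):
                # Scale and flip the upstream gradient.
                return -self.lambda_ * dy
            return tf.identity(inputs), grad
        return _reverse(x)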