AttributeError: 'Adam' object has no attribute 'zero_grad'
Sorry, that was my mistake. Keras optimizers have no `zero_grad` method. Unlike PyTorch, TensorFlow does not accumulate gradients between steps: each `tf.GradientTape` context records its own operations and `tape.gradient(...)` returns gradients computed only from that context, so there is nothing to clear. You simply compute the gradients with `tape.gradient(...)` and apply them with `optimizer.apply_gradients(...)`. Here is the corrected code:
```python
import numpy as np
import tensorflow as tf
from tensorflow import keras

# MAX_EPOCH, BATCH_SIZE, POINT, SAMPLE_NUM and N_GNET are assumed to be
# defined earlier in your script.

# Build the generator model
generator = keras.Sequential([...])  # fill in the generator's layers
# Build the discriminator model
discriminator = keras.Sequential([...])  # fill in the discriminator's layers

# Loss function
criterion = keras.losses.BinaryCrossentropy(from_logits=True)

# Optimizers
optimizer_d = keras.optimizers.Adam(learning_rate=0.0001)
optimizer_g = keras.optimizers.Adam(learning_rate=0.0001)

# Target labels: ones for real samples, zeros for generated ones
# (this assumes the discriminator outputs one logit per sample)
true_label = tf.ones((BATCH_SIZE, 1))
fake_label = tf.zeros((BATCH_SIZE, 1))

for i in range(MAX_EPOCH):
    # Add noise to the real data; TensorFlow places tensors on the GPU
    # automatically, so no PyTorch-style .to(device) call is needed
    real_data = np.vstack([np.sin(POINT) + np.random.normal(0, 0.01, SAMPLE_NUM) for _ in range(BATCH_SIZE)])
    real_data = tf.convert_to_tensor(real_data, dtype=tf.float32)

    # Random noise as the generator's input
    g_noises = np.random.randn(BATCH_SIZE, N_GNET)
    g_noises = tf.convert_to_tensor(g_noises, dtype=tf.float32)

    # Train the discriminator on real data
    with tf.GradientTape() as tape_d:
        d_real = discriminator(real_data)
        loss_d_real = criterion(true_label, d_real)  # Keras losses take (y_true, y_pred)
    grads_d_real = tape_d.gradient(loss_d_real, discriminator.trainable_weights)
    optimizer_d.apply_gradients(zip(grads_d_real, discriminator.trainable_weights))

    # Train the discriminator on generated (fake) data
    with tf.GradientTape() as tape_d:
        fake_data = generator(g_noises)
        d_fake = discriminator(fake_data)
        loss_d_fake = criterion(fake_label, d_fake)
    grads_d_fake = tape_d.gradient(loss_d_fake, discriminator.trainable_weights)
    optimizer_d.apply_gradients(zip(grads_d_fake, discriminator.trainable_weights))

    # Train the generator: it tries to make the discriminator label fakes as real
    with tf.GradientTape() as tape_g:
        d_fake = discriminator(generator(g_noises))
        loss_g = criterion(true_label, d_fake)
    grads_g = tape_g.gradient(loss_g, generator.trainable_weights)
    optimizer_g.apply_gradients(zip(grads_g, generator.trainable_weights))
```
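To see why no `zero_grad` equivalent exists, here is a minimal, self-contained sketch (the toy variable and quadratic loss are illustrative only, not part of the model above). Each call to `tape.gradient` returns gradients computed solely from the operations recorded inside that tape's context, so there is no accumulated gradient state to reset:

```python
import tensorflow as tf

w = tf.Variable(2.0)

# Each step records its own tape, so gradients never carry over between steps.
# In PyTorch, without zero_grad(), w.grad would accumulate to 2.0, 4.0, 6.0;
# here every call returns a fresh 2.0.
for step in range(3):
    with tf.GradientTape() as tape:
        loss = (w - 1.0) ** 2  # toy quadratic loss, d(loss)/dw = 2*(w - 1)
    grad = tape.gradient(loss, w)  # gradient for this step only
    print(step, float(grad))  # prints 2.0 every time
```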