example-00.py
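"""Toy GAN for fixed-length sequence generation.

An LSTM-based generator maps a latent noise vector to a
(sequence_length, 1) series, and an LSTM-based discriminator classifies
sequences as real or generated. Random noise stands in for real training
data throughout this example.
"""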
import numpy as np
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers
# Define the length of sequences to be generated
sequence_length = 10
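# Dimensionality of the random noise vector fed to the generator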
latent_dim = 100
# Generator: maps a latent vector to a (sequence_length, 1) sequence
generator = keras.Sequential([
    layers.Input(shape=(latent_dim,)),
    # Repeat the latent vector once per time step so the generated sequence
    # matches the discriminator's (sequence_length, 1) input shape
    layers.RepeatVector(sequence_length),
    layers.LSTM(128, return_sequences=True),
    layers.TimeDistributed(layers.Dense(1))
])
# Discriminator
discriminator = keras.Sequential([
    layers.Input(shape=(sequence_length, 1)),
    layers.LSTM(128, return_sequences=True),
    layers.TimeDistributed(layers.Dense(1)),
    layers.Flatten(),
    layers.Dense(1, activation='sigmoid')
])
# Compile the discriminator first so train_on_batch updates its weights
discriminator.compile(optimizer='adam', loss='binary_crossentropy')
# GAN: freeze the discriminator inside the combined model so that training
# the GAN updates only the generator
discriminator.trainable = False
gan_input = keras.Input(shape=(latent_dim,))
x = generator(gan_input)
gan_output = discriminator(x)
gan = keras.models.Model(gan_input, gan_output)
gan.compile(optimizer='adam', loss='binary_crossentropy')
# Training loop
batch_size = 32
epochs = 1000
for epoch in range(epochs):
    # Generate random noise as input to the generator
    noise = np.random.normal(0, 1, (batch_size, latent_dim))
    # Generate synthetic sequences
    generated_sequences = generator.predict(noise)
    # Real sequences (for demonstration; you would use your real data here)
    real_sequences = np.random.rand(batch_size, sequence_length, 1)
    # Concatenate real and generated sequences for discriminator training
    combined_sequences = np.concatenate([real_sequences, generated_sequences])
    # Labels for discriminator (1 for real, 0 for generated)
    labels = np.concatenate([np.ones((batch_size, 1)), np.zeros((batch_size, 1))])
    # Train discriminator
    d_loss = discriminator.train_on_batch(combined_sequences, labels)
    # Generate new random noise for the generator
    noise = np.random.normal(0, 1, (batch_size, latent_dim))
    # Labels for generator (all 1s, trying to fool the discriminator)
    labels = np.ones((batch_size, 1))
    # Train GAN (only the generator updates, since the discriminator is frozen)
    g_loss = gan.train_on_batch(noise, labels)
    # Print progress
    if epoch % 100 == 0:
        print(f"Epoch: {epoch}, D Loss: {d_loss}, G Loss: {g_loss}")
# After training, you can generate sequences using the generator
generated_sequences = generator.predict(np.random.normal(0, 1, (10, latent_dim)))
print("Generated Sequences:")
print(generated_sequences)
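# Note: the "real" sequences above are random noise purely for demonstration.
# With an actual dataset, each discriminator batch would instead be sampled
# from an array of real sequences shaped (num_samples, sequence_length, 1),
# for example (hypothetical `real_data` array):
#   idx = np.random.randint(0, len(real_data), batch_size)
#   real_sequences = real_data[idx]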