-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathfunctionReLUExample.py
63 lines (49 loc) · 1.68 KB
/
functionReLUExample.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
'''
This example does not use the polynomial layers, but instead just tries
to match the function using relu. I've included this for comparison purposes.
'''
import matplotlib.pyplot as plt
import numpy as np
import tensorflow as tf
import high_order_layers.PolynomialLayers as poly
from tensorflow.keras.layers import *
# Target function parameters: y = 0.5 * cos(factor * (x - offset)).
offset = -0.1
factor = 1.5 * 3.14159  # ~1.5*pi

# Evenly spaced evaluation grid on [-1, 1) and its exact target values.
xTest = (np.arange(100) / 50 - 1.0)
yTest = 0.5 * np.cos(factor * (xTest - offset))

# 1000 random training points drawn uniformly from the same interval.
xTrain = (tf.random.uniform([1000], minval=-1.0, maxval=1, dtype=tf.float32))
yTrain = 0.5 * tf.math.cos(factor * (xTrain - offset))

# One entry per network width to compare; each becomes one scatter series.
modelSet = [
    {'name': '5 hidden units', 'units': 5},
    {'name': '10 hidden units', 'units': 10},
    {'name': '20 hidden units', 'units': 20},
    #{'name' : '500 hidden units', 'units' : 500}
]
# Parallel plotting styles, consumed in lockstep with modelSet below.
colorIndex = ['red', 'green', 'blue', 'purple', 'black']
symbol = ['+', 'x', 'o', 'v', '.']

# zip over the parallel lists instead of index arithmetic (range(len(...))).
for config, color, mark in zip(modelSet, colorIndex, symbol):
    # Two relu hidden layers of the configured width. The leading Dense(1)
    # applies an affine map to the scalar input (kept to match the original
    # architecture exactly).
    model = tf.keras.models.Sequential([
        Dense(1),
        Dense(config['units'], activation='relu'),
        Dense(config['units'], activation='relu'),
        Dense(1)
    ])
    # FIX: 'accuracy' is meaningless for continuous regression targets
    # (it counts exact matches); report mean absolute error instead.
    model.compile(optimizer='adam',
                  loss='mean_squared_error',
                  metrics=['mean_absolute_error'])
    model.fit(xTrain, yTrain, epochs=40, batch_size=1)
    model.evaluate(xTrain, yTrain)
    predictions = model.predict(xTest)
    plt.scatter(
        xTest,
        predictions.flatten(),
        c=color,
        marker=mark,
        label=config['name'])

# Overlay the exact target curve and finish the figure.
plt.plot(xTest, yTest, '-', label='actual', color='black')
plt.title('standard relu layer - two hidden layers')
plt.xlabel('x')
plt.ylabel('y')
plt.legend()
plt.show()