# attention_function.py
import numpy as np
import pandas as pd
import keras.backend as K
from keras.layers import Input, multiply
from keras.layers.core import Dense, Reshape, Lambda, RepeatVector, Permute, Flatten
from keras.layers.recurrent import LSTM
from keras.models import Model
# Plotting.
import matplotlib.pyplot as plt
# ## Helper functions
def get_activations(model, inputs, print_shape_only=False, layer_name=None, verbose=False):
    """
    Get activations from a model.
    Args:
        model: a Keras model.
        inputs: the inputs for the model.
        print_shape_only: whether to print only the shape of each activation instead of the full array.
        layer_name: name of a specific layer whose output should be returned; None returns all layers.
        verbose: whether to print the activations.
    Returns:
        activations: list of layer activations.
    """
    activations = []
    inp = model.input
    if layer_name is None:
        outputs = [layer.output for layer in model.layers]  # all layer outputs
    else:
        outputs = [layer.output for layer in model.layers if layer.name == layer_name]
    funcs = [K.function([inp] + [K.learning_phase()], [out]) for out in outputs]  # evaluation functions
    layer_outputs = [func([inputs, 1.])[0] for func in funcs]
    if verbose:
        print('----- activations -----')
    for layer_activations in layer_outputs:
        activations.append(layer_activations)
        if verbose:
            if print_shape_only:
                print(layer_activations.shape)
            else:
                print(layer_activations)
    return activations
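# Typical use (hypothetical trained model `m` and array `x_test`, not from this
# excerpt): retrieve the learned attention weights via the layer name set by the
# attention blocks below, e.g.
#   attention = get_activations(m, x_test, layer_name='attention_vec')[0]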
def get_data_recurrent(n, time_steps, input_dim, attention_column=10):
    """
    Data generation. x is random except that x[:, attention_column, :] equals the target y.
    The network should learn that target = x[:, attention_column, :], so most of its
    attention should be focused on the time step addressed by attention_column.
    Args:
        n: the number of samples to generate.
        time_steps: the number of time steps of your series.
        input_dim: the number of dimensions of each element in the series.
        attention_column: the time step linked to the target. Everything else is purely random.
    Returns:
        x: model inputs
        y: model targets
    """
    x = np.random.standard_normal(size=(n, time_steps, input_dim))
    y = np.random.randint(low=0, high=2, size=(n, 1))
    x[:, attention_column, :] = np.tile(y[:], (1, input_dim))
    return x, y
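# Example with illustrative values: x has shape (1000, 20, 2) and y has shape
# (1000, 1), with x[:, 10, :] equal to y in every input dimension.
#   x, y = get_data_recurrent(n=1000, time_steps=20, input_dim=2)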
def attention_3d_block(inputs, TIME_STEPS):
    """
    Temporal attention: each input dimension gets its own softmax distribution
    over the TIME_STEPS time steps.
    inputs.shape = (batch_size, time_steps, input_dim)
    """
    input_dim = int(inputs.shape[2])
    a = Permute((2, 1))(inputs)
    a = Reshape((input_dim, TIME_STEPS))(a)  # no-op; documents the (input_dim, TIME_STEPS) ordering
    a = Dense(TIME_STEPS, activation='softmax')(a)
    a_probs = Permute((2, 1), name='attention_vec')(a)
    output_attention_mul = multiply([inputs, a_probs])  # element-wise weighting of the inputs
    return output_attention_mul
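# Sanity check (illustrative, using hypothetical `m` and `x`): the 'attention_vec'
# output of this block sums to 1 along the time axis for every input dimension:
#   weights = get_activations(m, x, layer_name='attention_vec')[0]
#   np.allclose(weights.sum(axis=1), 1.0)  # -> True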
def attention_3d_block_time_features(inputs, TIME_STEPS):
    """
    Joint attention over time steps and features: a single softmax is computed
    over all TIME_STEPS * input_dim positions of the flattened input.
    inputs.shape = (batch_size, time_steps, input_dim)
    """
    input_dim = int(inputs.shape[2])
    a = Flatten()(inputs)
    a = Dense(TIME_STEPS * input_dim, activation='softmax')(a)
    a = Reshape((input_dim, TIME_STEPS))(a)
    a_probs = Permute((2, 1), name='attention_vec')(a)
    output_attention_mul = multiply([inputs, a_probs])
    return output_attention_mul
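# Unlike attention_3d_block, the weights here sum to 1 across all
# TIME_STEPS * input_dim positions combined, so this block can also shift
# attention between features, not only between time steps.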
def attention_spatial_block(inputs):
    """
    Spatial attention: at each time step, a softmax is computed over the
    input dimensions (features) rather than over time.
    inputs.shape = (batch_size, time_steps, input_dim)
    """
    time_steps = int(inputs.shape[1])
    input_dim = int(inputs.shape[2])
    a = Reshape((time_steps, input_dim))(inputs)  # no-op; documents the dimension order
    a_probs = Dense(input_dim, activation='softmax', name='attention_vec')(a)
    output_attention_mul = multiply([inputs, a_probs])  # replaces the removed Keras 1 merge(..., mode='mul')
    return output_attention_mul
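# Note: all three blocks above name their weight layer 'attention_vec', so only
# one block can be used per model; get_activations(..., layer_name='attention_vec')
# then retrieves the weights regardless of which block was chosen.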
# ## Hyperparameters and builder methods
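# Assumed values for this excerpt (TIME_STEPS and INPUT_DIM are referenced below
# but not defined in the lines shown). TIME_STEPS must exceed get_data_recurrent's
# default attention_column of 10; adjust both to match your data.
TIME_STEPS = 20
INPUT_DIM = 2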
def model_attention_applied_before_lstm():
    inputs = Input(shape=(TIME_STEPS, INPUT_DIM,))
    attention_mul = attention_3d_block(inputs, TIME_STEPS)
    # attention_mul = attention_spatial_block(inputs)
    lstm_units = 32
    attention_mul = LSTM(lstm_units, return_sequences=False)(attention_mul)
    output = Dense(1, activation='sigmoid')(attention_mul)
    model = Model(inputs=[inputs], outputs=output)
    return model
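# Minimal end-to-end sketch (an assumed workflow, not part of the original
# excerpt; sample counts, epochs and the plot are illustrative):
if __name__ == '__main__':
    x, y = get_data_recurrent(n=10000, time_steps=TIME_STEPS, input_dim=INPUT_DIM)
    m = model_attention_applied_before_lstm()
    m.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])
    m.fit(x, y, epochs=1, batch_size=64, validation_split=0.1)
    # Average the attention weights over a fresh batch and plot them per time step;
    # they should peak at the attention_column (10 by default).
    x_test, _ = get_data_recurrent(n=256, time_steps=TIME_STEPS, input_dim=INPUT_DIM)
    attention = get_activations(m, x_test, layer_name='attention_vec')[0]  # (256, TIME_STEPS, INPUT_DIM)
    attention_mean = attention.mean(axis=(0, 2))  # average over samples and input dims
    plt.bar(range(TIME_STEPS), attention_mean)
    plt.xlabel('time step')
    plt.ylabel('mean attention weight')
    plt.show()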