@@ -2,7 +2,16 @@

 from hls4ml.backends.backend import get_backend
 from hls4ml.backends.template import FunctionCallTemplate, LayerConfigTemplate
-from hls4ml.model.layers import Activation, BatchNormalization, Dense, HardActivation, ParametrizedActivation, PReLU, Softmax
+from hls4ml.model.layers import (
+    Activation,
+    BatchNormalization,
+    Dense,
+    HardActivation,
+    LayerNormalization,
+    ParametrizedActivation,
+    PReLU,
+    Softmax,
+)
 from hls4ml.model.optimizer.passes.hgq_proxy_model import UnaryLUT

 # Dense templates
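
Reviewer note (not part of the diff): in the hunk below, `format()` only fills `n_in` and `product_type` by hand; every other placeholder in `layernorm_config_template` must already be available through `_default_config_params`, i.e. from the node's attributes. A rough sketch of what a `LayerNormalization` node is therefore assumed to carry; the key names are taken from the template placeholders, and all concrete values are hypothetical:

```python
# Attribute names inferred from the placeholders in layernorm_config_template;
# the values are made-up stand-ins, not hls4ml defaults.
expected_layernorm_attrs = {
    'seq_len': 128,            # sequence length of the normalized tensor
    'axis': 2,                 # axis over which mean/variance are computed
    'epsilon_power_of_10': 6,  # presumably encodes epsilon = 10**-6
    'table_range_power2': 2,   # presumably the LUT input range as a power of 2
    'table_size': 1024,        # number of entries in the lookup table
}
# In addition, the type attributes accum_t, bias_t, scale_t and table_t are
# rendered as typedefs, and the layer must expose weight variables named
# 'scale' and 'bias'.
```
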
@@ -136,6 +145,58 @@ def format(self, node): |
         return self.template.format(**params)


+# LayerNormalization templates
+
+layernorm_config_template = """struct config{index} : nnet::layernorm_config {{
+    static const unsigned n_in = {n_in};
+    static const unsigned seq_len = {seq_len};
+    static const unsigned axis = {axis};
+    static const unsigned epsilon_power_of_10 = {epsilon_power_of_10};
+    static const unsigned table_range_power2 = {table_range_power2};
+    static const unsigned table_size = {table_size};
+    typedef {accum_t.name} accum_t;
+    typedef {bias_t.name} bias_t;
+    typedef {scale_t.name} scale_t;
+    typedef {table_t.name} table_t;
+    static const unsigned io_type = nnet::{iotype};
+    static const unsigned reuse_factor = {reuse};
+    template<class x_T, class y_T>
+    using product = nnet::product::{product_type}<x_T, y_T>;
+}};\n"""
+
+layernorm_function_template = 'nnet::layernormalize<{input_t}, {output_t}, {config}>({input}, {output}, {scale}, {bias});'
+
+layernorm_include_list = ['nnet_utils/nnet_layernorm.h']
+
+
+class LayerNormalizationConfigTemplate(LayerConfigTemplate):
+    def __init__(self):
+        super().__init__(LayerNormalization)
+        self.template = layernorm_config_template
+
+    def format(self, node):
+        params = self._default_config_params(node)
+        params['n_in'] = node.get_input_variable().size_cpp()
+        params['product_type'] = get_backend('vivado').product_type(
+            node.get_input_variable().type.precision, node.get_weights('scale').type.precision
+        )
+
+        return self.template.format(**params)
+
+
+class LayerNormalizationFunctionTemplate(FunctionCallTemplate):
+    def __init__(self):
+        super().__init__(LayerNormalization, include_header=layernorm_include_list)
+        self.template = layernorm_function_template
+
+    def format(self, node):
+        params = self._default_function_params(node)
+        params['scale'] = node.get_weights('scale').name
+        params['bias'] = node.get_weights('bias').name
+
+        return self.template.format(**params)
+
+
 # Activation templates

 activ_config_template = """struct {type}_config{index} : nnet::activ_config {{
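Reviewer note (not part of the diff): as a quick sanity check of what these templates emit, the two strings above can be rendered directly with stand-in values. A minimal, self-contained sketch; `SimpleNamespace` stands in for hls4ml's named-type objects (only a `.name` attribute is needed here), and every concrete value below is hypothetical:

```python
from types import SimpleNamespace

# Copied verbatim from the diff above.
layernorm_config_template = """struct config{index} : nnet::layernorm_config {{
    static const unsigned n_in = {n_in};
    static const unsigned seq_len = {seq_len};
    static const unsigned axis = {axis};
    static const unsigned epsilon_power_of_10 = {epsilon_power_of_10};
    static const unsigned table_range_power2 = {table_range_power2};
    static const unsigned table_size = {table_size};
    typedef {accum_t.name} accum_t;
    typedef {bias_t.name} bias_t;
    typedef {scale_t.name} scale_t;
    typedef {table_t.name} table_t;
    static const unsigned io_type = nnet::{iotype};
    static const unsigned reuse_factor = {reuse};
    template<class x_T, class y_T>
    using product = nnet::product::{product_type}<x_T, y_T>;
}};\n"""

layernorm_function_template = 'nnet::layernormalize<{input_t}, {output_t}, {config}>({input}, {output}, {scale}, {bias});'

# Hypothetical values for a layer with index 3; str.format resolves
# attribute references such as {accum_t.name} on the stand-in objects.
print(
    layernorm_config_template.format(
        index=3,
        n_in='N_LAYER_3',  # size_cpp() usually yields a symbolic size expression
        seq_len=128,
        axis=2,
        epsilon_power_of_10=6,
        table_range_power2=2,
        table_size=1024,
        accum_t=SimpleNamespace(name='ap_fixed<24,12>'),
        bias_t=SimpleNamespace(name='ap_fixed<16,6>'),
        scale_t=SimpleNamespace(name='ap_fixed<16,6>'),
        table_t=SimpleNamespace(name='ap_fixed<18,8>'),
        iotype='io_parallel',
        reuse=1,
        product_type='mult',
    )
)

print(
    layernorm_function_template.format(
        input_t='input3_t',
        output_t='result_t',
        config='config3',
        input='layer2_out',
        output='layer3_out',
        scale='s3',
        bias='b3',
    )
)
```

In a generated HLS project these strings typically end up in `firmware/parameters.h` (the config struct) and in the top-level function body (the `nnet::layernormalize` call).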