Commit
* Improve tests
* Add _POSIX_C_SOURCE back
Showing 38 changed files with 958 additions and 20 deletions.
@@ -0,0 +1,34 @@
# This script generates test cases for nn_act_func_softmax function.

import numpy as np


# Returns the softmax activation function result.
def nn_act_func_softmax(x):
    exp_x = np.exp(x - np.max(x, axis=-1, keepdims=True))
    return exp_x / np.sum(exp_x, axis=-1, keepdims=True)


# Generates a test case.
def generate_test_case(input):
    input_c = ", ".join(map(str, input.flatten()))
    expected_value = nn_act_func_softmax(input)
    expected_value_c = ", ".join(map(str, expected_value.flatten()))
    return f"""
    {{
        .act_func = nn_act_func_init(NN_ACT_FUNC_TENSOR, nn_act_func_softmax),
        .input = nn_tensor_init_NNTensor(2, (const size_t[]){{1, {len(input)}}}, false, (const NNTensorUnit[]){{{input_c}}}, NULL),
        .expected_value = nn_tensor_init_NNTensor(2, (const size_t[]){{1, {len(input)}}}, false, (const NNTensorUnit[]){{{expected_value_c}}}, NULL),
        .expected_tolerance = default_expected_tolerance,
    }}"""


# Generate test cases
np.random.seed(2024)
test_cases = []
inputs = [
    np.array([0.8, 0.2, 0.1]),
    np.array([-0.6, 0.0, 0.6]),
    np.array([0.3, -0.3, 0.0])
]
for input in inputs:
    test_cases.append(generate_test_case(input))

print(f"TestCase test_cases[] = {{{', '.join(test_cases)},\n}};")
@@ -0,0 +1,22 @@
# This script generates test cases for nn_act_func_identity function.

import numpy as np


# Generates a test case.
def generate_test_case(input):
    return f"""
    {{
        .act_func = nn_act_func_init(NN_ACT_FUNC_SCALAR, nn_act_func_identity),
        .input = {input},
        .expected_value = {input},
        .expected_tolerance = default_expected_tolerance,
    }}"""


# Generate test cases
np.random.seed(2024)
test_cases = []
inputs = [-1.0, 0.0, 1.0]
for input in inputs:
    test_cases.append(generate_test_case(input))

print(f"TestCase test_cases[] = {{{', '.join(test_cases)},\n}};")
@@ -0,0 +1,27 @@
# This script generates test cases for nn_act_func_relu function.

import numpy as np


# Returns the ReLU activation function result.
def nn_act_func_relu(x):
    return np.maximum(0, x)


# Generates a test case.
def generate_test_case(input):
    expected_value = nn_act_func_relu(input)
    return f"""
    {{
        .act_func = nn_act_func_init(NN_ACT_FUNC_SCALAR, nn_act_func_relu),
        .input = {input},
        .expected_value = {expected_value},
        .expected_tolerance = default_expected_tolerance,
    }}"""


# Generate test cases
np.random.seed(2024)
test_cases = []
inputs = [-2.0, -1.0, 0.0, 1.0, 2.0]
for input in inputs:
    test_cases.append(generate_test_case(input))

print(f"TestCase test_cases[] = {{{', '.join(test_cases)},\n}};")
@@ -0,0 +1,33 @@
# This script generates test cases for nn_act_func_relu function.

import numpy as np


# Returns the ReLU activation function result.
def nn_act_func_relu(x):
    return np.maximum(0, x)


# Generates a test case.
def generate_test_case(input):
    input_c = ", ".join(map(str, input.flatten()))
    expected_value = nn_act_func_relu(input)
    expected_value_c = ", ".join(map(str, expected_value.flatten()))
    return f"""
    {{
        .act_func = nn_act_func_init(NN_ACT_FUNC_TENSOR, nn_act_func_relu),
        .input = nn_tensor_init_NNTensor(2, (const size_t[]){{1, {len(input)}}}, false, (const NNTensorUnit[]){{{input_c}}}, NULL),
        .expected_value = nn_tensor_init_NNTensor(2, (const size_t[]){{1, {len(input)}}}, false, (const NNTensorUnit[]){{{expected_value_c}}}, NULL),
        .expected_tolerance = default_expected_tolerance,
    }}"""


# Generate test cases
np.random.seed(2024)
test_cases = []
inputs = [
    np.array([0.8, 0.2, -0.1, 0.0]),
    np.array([-0.6, 0.0, 0.6, -1.0]),
    np.array([0.3, -0.3, 1.0, 0.0])
]
for input in inputs:
    test_cases.append(generate_test_case(input))

print(f"TestCase test_cases[] = {{{', '.join(test_cases)},\n}};")
@@ -0,0 +1,27 @@
# This script generates test cases for nn_act_func_sigmoid function.

import numpy as np


# Returns the sigmoid activation function result.
def nn_act_func_sigmoid(x):
    return 1 / (1 + np.exp(-x))


# Generates a test case.
def generate_test_case(input):
    expected_value = nn_act_func_sigmoid(input)
    return f"""
    {{
        .act_func = nn_act_func_init(NN_ACT_FUNC_SCALAR, nn_act_func_sigmoid),
        .input = {input},
        .expected_value = {expected_value},
        .expected_tolerance = default_expected_tolerance,
    }}"""


# Generate test cases
np.random.seed(2024)
test_cases = []
inputs = [-2.0, -1.0, 0.0, 1.0, 2.0]
for input in inputs:
    test_cases.append(generate_test_case(input))

print(f"TestCase test_cases[] = {{{', '.join(test_cases)},\n}};")
@@ -0,0 +1,34 @@
# This script generates test cases for nn_act_func_softmax function.

import numpy as np


# Returns the softmax activation function result.
def nn_act_func_softmax(x):
    exp_x = np.exp(x - np.max(x, axis=-1, keepdims=True))
    return exp_x / np.sum(exp_x, axis=-1, keepdims=True)


# Generates a test case.
def generate_test_case(input):
    input_c = ", ".join(map(str, input.flatten()))
    expected_value = nn_act_func_softmax(input)
    expected_value_c = ", ".join(map(str, expected_value.flatten()))
    return f"""
    {{
        .act_func = nn_act_func_init(NN_ACT_FUNC_TENSOR, nn_act_func_softmax),
        .input = nn_tensor_init_NNTensor(1, (const size_t[]){{{len(input)}}}, false, (const NNTensorUnit[]){{{input_c}}}, NULL),
        .expected_value = nn_tensor_init_NNTensor(1, (const size_t[]){{{len(input)}}}, false, (const NNTensorUnit[]){{{expected_value_c}}}, NULL),
        .expected_tolerance = default_expected_tolerance,
    }}"""


# Generate test cases
np.random.seed(2024)
test_cases = []
inputs = [
    np.array([1.0, 2.0, 3.0]),
    np.array([-1.0, 0.0, 1.0]),
    np.array([0.5, -0.5, 0.0])
]
for input in inputs:
    test_cases.append(generate_test_case(input))

print(f"TestCase test_cases[] = {{{', '.join(test_cases)},\n}};")
@@ -0,0 +1,34 @@
# This script generates test cases for nn_act_func_softmax function.

import numpy as np


# Returns the softmax activation function result.
def nn_act_func_softmax(x):
    exp_x = np.exp(x - np.max(x, axis=-1, keepdims=True))
    return exp_x / np.sum(exp_x, axis=-1, keepdims=True)


# Generates a test case.
def generate_test_case(input):
    input_c = ", ".join(map(str, input.flatten()))
    expected_value = nn_act_func_softmax(input)
    expected_value_c = ", ".join(map(str, expected_value.flatten()))
    return f"""
    {{
        .act_func = nn_act_func_init(NN_ACT_FUNC_TENSOR, nn_act_func_softmax),
        .input = nn_tensor_init_NNTensor(2, (const size_t[]){{1, {len(input)}}}, false, (const NNTensorUnit[]){{{input_c}}}, NULL),
        .expected_value = nn_tensor_init_NNTensor(2, (const size_t[]){{1, {len(input)}}}, false, (const NNTensorUnit[]){{{expected_value_c}}}, NULL),
        .expected_tolerance = default_expected_tolerance,
    }}"""


# Generate test cases
np.random.seed(2024)
test_cases = []
inputs = [
    np.array([-0.1, 0.2, 0.8, 0.0]),
    np.array([1.0, 0.5, -0.4, -1.0]),
    np.array([0.9, -0.3, 0.1, 0.0])
]
for input in inputs:
    test_cases.append(generate_test_case(input))

print(f"TestCase test_cases[] = {{{', '.join(test_cases)},\n}};")
scripts/test/layer_multi_gen_tc.py → scripts/test/gen/nn_layer_multi.py (2 changes: 1 addition & 1 deletion)
@@ -0,0 +1,8 @@
void test_nn_act_func_init();
void test_nn_act_func();
void test_nn_act_func_identity();
void test_nn_act_func_sigmoid();
void test_nn_act_func_relu();
void test_nn_act_func_softmax();
void test_nn_act_func_scalar_batch();
void test_nn_act_func_tensor_batch();
@@ -0,0 +1,15 @@
tests/arch/generic/activation/nn_act_func_init.c
tests/arch/generic/activation/nn_act_func.c
tests/arch/generic/activation/nn_act_func_identity.c
tests/arch/generic/activation/nn_act_func_sigmoid.c
tests/arch/generic/activation/nn_act_func_relu.c
tests/arch/generic/activation/nn_act_func_softmax.c
tests/arch/generic/activation/nn_act_func_scalar_batch.c
tests/arch/generic/activation/nn_act_func_tensor_batch.c
src/nn_activation.c
src/nn_app.c
src/nn_argmax.c
src/nn_config.c
src/nn_error.c
src/nn_test.c
src/nn_tensor.c
@@ -0,0 +1,18 @@
#include "./activation.h"
#include "nn_app.h"

int main(int argc, char *argv[]) {
    nn_init_app(argc, argv);
    // nn_set_debug_level(5); // for debugging

    test_nn_act_func_init();
    test_nn_act_func();
    test_nn_act_func_identity();
    test_nn_act_func_sigmoid();
    test_nn_act_func_relu();
    test_nn_act_func_softmax();
    test_nn_act_func_scalar_batch();
    test_nn_act_func_tensor_batch();

    return 0;
}
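The commit shows only the test declarations and this runner; the test bodies themselves are not visible in this excerpt. Purely as an illustrative sketch of how the generated scalar TestCase arrays could be consumed — the ScalarTestCase type, the raw function-pointer field, and run_scalar_cases below are assumptions made for this sketch, not the repository's definitions (the real code wraps activations via nn_act_func_init and defines TestCase in its own headers):

    #include <math.h>
    #include <stddef.h>
    #include <stdio.h>

    // Hypothetical stand-ins for this sketch only.
    typedef double NNTensorUnit;

    typedef struct {
        NNTensorUnit (*act_func)(NNTensorUnit); // assumed scalar signature
        NNTensorUnit input;
        NNTensorUnit expected_value;
        NNTensorUnit expected_tolerance;
    } ScalarTestCase; // field names mirror the generated initializers above

    // Runs each case and reports results outside the allowed tolerance.
    void run_scalar_cases(const ScalarTestCase *cases, size_t n) {
        for (size_t i = 0; i < n; i++) {
            NNTensorUnit got = cases[i].act_func(cases[i].input);
            if (fabs(got - cases[i].expected_value) > cases[i].expected_tolerance) {
                printf("case %zu: got %f, expected %f\n",
                       i, got, cases[i].expected_value);
            }
        }
    }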