Skip to content
This repository has been archived by the owner on Jan 24, 2024. It is now read-only.

op unittest for relu/reshape #1488

Merged
merged 3 commits into from
Jun 1, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
108 changes: 92 additions & 16 deletions python/tests/ops/test_relu_op.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,41 +14,37 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest
import numpy as np
from op_test import OpTest, OpTestTool
import paddle
import paddle.nn.functional as F
import cinn
from cinn.frontend import *
from cinn.common import *
from op_test import OpTest, OpTestTool
from op_test_helper import TestCaseHelper


@OpTestTool.skip_if(not is_compiled_with_cuda(),
"x86 test will be skipped due to timeout.")
class TestReluOp(OpTest):
def setUp(self):
    """Prepare one parameterized relu test case.

    ``self.case`` (a shape/dtype dict) is presumably injected by the
    TestCaseHelper driver before setUp runs -- confirm against
    op_test_helper.  Here we only materialize the random inputs for it.
    """
    # NOTE(review): a stale ``self.init_case()`` call (merge residue from
    # the old API) was removed -- that method no longer exists in this
    # class and would raise AttributeError.
    print(f"\nRunning {self.__class__.__name__}: {self.case}")
    self.inputs = {}
    self.prepare_inputs()

def prepare_inputs(self):
    """Generate random ``x`` and ``dout`` tensors in [-1, 1] for the case.

    Both tensors share the case's shape and dtype; ``dout`` is the upstream
    gradient fed to relu_grad in the backward check.
    """
    # NOTE(review): the fused diff left the old hard-coded
    # ``np.random.random([32, 64])`` entries alongside the new ones,
    # producing duplicate dict keys and a missing comma (syntax error).
    # Only the parameterized version is kept.
    self.inputs = {
        "x": self.random(self.case["shape"], self.case["dtype"], -1.0,
                         1.0),
        "dout": self.random(self.case["shape"], self.case["dtype"], -1.0,
                            1.0),
    }

def build_paddle_program(self, target):
    """Reference path: Paddle relu forward plus the gradient w.r.t. x."""
    input_tensor = paddle.to_tensor(self.inputs["x"], stop_gradient=False)
    relu_out = F.relu(input_tensor)
    self.paddle_outputs = [relu_out]
    self.paddle_grads = self.get_paddle_grads(
        [relu_out], [input_tensor], [self.inputs["dout"]])

# Note: If the forward and backward operators are run in the same program,
# the forward result will be incorrect.
def build_cinn_program(self, target):
builder = NetBuilder("relu")
x = builder.create_input(
Expand All @@ -60,8 +56,8 @@ def build_cinn_program(self, target):
self.nptype2cinntype(self.inputs["dout"].dtype),
self.inputs["dout"].shape, "dout")
x_grad = builder.relu_grad(dout, out)
prog = builder.build()

prog = builder.build()
res = self.get_cinn_output(
prog,
target, [x, dout], [self.inputs["x"], self.inputs["dout"]],
Expand All @@ -75,5 +71,85 @@ def test_check_results(self):
self.check_outputs_and_grads()


class TestReluOpShape(TestCaseHelper):
    """Sweeps the relu op over a range of tensor ranks and sizes."""

    def init_attrs(self):
        self.class_name = "TestReluOpShape"
        self.cls = TestReluOp
        # 1-D through 4-D cases, including size-1 axes and large tensors.
        shapes = [
            [10],
            [8, 5],
            [10, 3, 5],
            [80, 40, 5, 7],
            [80, 1, 5, 7],
            [80, 3, 1024, 7],
            [10, 5, 1024, 2048],
            [1],
            [512],
            [1024],
            [2048],
            [1, 1, 1, 1],
        ]
        self.inputs = [{"shape": shape} for shape in shapes]
        self.dtypes = [{"dtype": "float32"}]
        self.attrs = []


class TestReluOpDtype(TestCaseHelper):
    """Sweeps the relu op over floating dtypes on representative shapes."""

    def init_attrs(self):
        self.class_name = "TestReluOpDtype"
        self.cls = TestReluOp
        self.inputs = [
            {"shape": shape} for shape in ([1], [5], [80, 40, 5, 7])
        ]
        self.dtypes = [
            {"dtype": name} for name in ("float16", "float32", "float64")
        ]
        self.attrs = []


if __name__ == "__main__":
    # NOTE(review): the fused diff kept the old ``unittest.main()`` call
    # ahead of these lines; unittest.main() raises SystemExit, so the
    # TestCaseHelper runs below it were unreachable.  Only the helper-driven
    # runs are kept, matching the new parameterized style of this file.
    TestReluOpShape().run()
    TestReluOpDtype().run()
193 changes: 180 additions & 13 deletions python/tests/ops/test_reshape_op.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,43 +14,210 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import cinn
import numpy as np
import paddle
import unittest

from cinn.frontend import *
from cinn.common import *
from op_test import OpTest, OpTestTool
from op_test_helper import TestCaseHelper


@OpTestTool.skip_if(not is_compiled_with_cuda(),
"x86 test will be skipped due to timeout.")
class TestReshapeOp(OpTest):
def setUp(self):
    """Prepare one parameterized reshape test case.

    ``self.case`` (shape/dtype/target_shape dict) is presumably injected
    by the TestCaseHelper driver before setUp runs -- confirm against
    op_test_helper.
    """
    # NOTE(review): a stale ``self.init_case()`` call (merge residue from
    # the old API) was removed -- that method no longer exists in this
    # class and would raise AttributeError.
    print(f"\nRunning {self.__class__.__name__}: {self.case}")
    self.inputs = {}
    self.prepare_inputs()

def prepare_inputs(self):
    """Generate a random input tensor and record the case's target shape."""
    # NOTE(review): the fused diff left the old ``init_case`` stub with its
    # hard-coded 2x3 input above this method; only the parameterized
    # version is kept.
    self.inputs = {
        "x": self.random(self.case["shape"], self.case["dtype"]),
    }
    self.target_shape = self.case["target_shape"]

def build_paddle_program(self, target):
    """Reference path: Paddle reshape to the case's target shape."""
    # NOTE(review): a stale hard-coded ``paddle.reshape(x, [3, 2])`` line
    # (merge residue) preceded the parameterized call, leaving a dead
    # assignment; only the parameterized reshape is kept.
    x = paddle.to_tensor(self.inputs["x"], stop_gradient=True)
    out = paddle.reshape(x, self.target_shape)
    self.paddle_outputs = [out]

def build_cinn_program(self, target):
    """Build and execute the CINN reshape program for the current case."""
    # NOTE(review): the fused diff duplicated the builder/x/out assignments
    # (old hard-coded [3, 2] version next to the parameterized one) and
    # left both ``cinn_outputs`` lines; only the new version is kept.
    builder = NetBuilder("reshape_test")
    x = builder.create_input(
        self.nptype2cinntype(self.inputs["x"].dtype),
        self.inputs["x"].shape, "x")
    out = builder.reshape(x, self.target_shape)

    prog = builder.build()
    res = self.get_cinn_output(prog, target, [x], [self.inputs["x"]],
                               [out])
    self.cinn_outputs = res

def test_check_results(self):
    """Compare CINN against Paddle; all_equal=True demands exact equality
    (no tolerance), since reshape should not alter any element values."""
    self.check_outputs_and_grads(all_equal=True)


class TestReshapeOpShape(TestCaseHelper):
    """Sweeps reshape between source/target shapes of ranks 1 through 5."""

    def init_attrs(self):
        self.class_name = "TestReshapeOpShape"
        self.cls = TestReshapeOp
        # (source_shape, target_shape) pairs; element counts always match.
        shape_pairs = [
            # 1D -> [1-5]D
            ([100], [100]),
            ([100], [10, 10]),
            ([125], [5, 5, 5]),
            ([256], [4, 4, 4, 4]),
            ([1024], [8, 8, 4, 4]),
            # 2D -> [1-5]D
            ([5, 5], [25]),
            ([6, 8], [4, 12]),
            ([10, 20], [5, 10, 4]),
            ([4, 8], [2, 2, 2, 4]),
            ([16, 16], [4, 2, 2, 1, 16]),
            # 3D -> [1-5]D
            ([1, 1, 1], [1]),
            ([1, 2, 3], [6, 1]),
            ([4, 8, 16], [16, 8, 4]),
            ([6, 6, 6], [4, 9, 2, 3]),
            ([8, 1, 8], [2, 2, 2, 2, 4]),
            # 4D -> [1-5]D
            ([4, 1, 2, 1], [8]),
            ([2, 2, 4, 8], [4, 32]),
            ([6, 7, 8, 9], [42, 36, 2]),
            ([1024, 1, 1, 1], [4, 4, 8, 8]),
            ([10, 20, 30, 40], [8, 6, 4, 2, 625]),
            # special
            ([1, 1024, 4], [1, 2048, 2]),
            ([2048, 2, 2], [256, 8, 4]),
            ([1, 1, 256], [16, 1, 16]),
            ([1, 1, 1, 1], [1, 1]),
            ([1, 1, 1], [1]),
            ([1], [1, 1, 1, 1]),
        ]
        self.inputs = [
            {"shape": src, "target_shape": dst} for src, dst in shape_pairs
        ]
        self.dtypes = [{"dtype": "float32"}]
        self.attrs = []


class TestReshapeOpDtype(TestCaseHelper):
    """Sweeps reshape across floating, boolean, and integer dtypes."""

    def init_attrs(self):
        self.class_name = "TestReshapeOpDtype"
        self.cls = TestReshapeOp
        self.inputs = [{"shape": [2, 3, 4], "target_shape": [4, 6]}]
        dtype_names = (
            "float16",
            "float32",
            "float64",
            "bool",
            "uint8",
            "int8",
            "int32",
            "int64",
        )
        self.dtypes = [{"dtype": name} for name in dtype_names]
        self.attrs = []


if __name__ == "__main__":
    # NOTE(review): the fused diff kept the old ``unittest.main()`` call
    # ahead of these lines; unittest.main() raises SystemExit, so the
    # TestCaseHelper runs below it were unreachable.  Only the helper-driven
    # runs are kept, matching the new parameterized style of this file.
    TestReshapeOpShape().run()
    TestReshapeOpDtype().run()