Skip to content
This repository has been archived by the owner on Jan 24, 2024. It is now read-only.

Commit

Permalink
fix bug
Browse files Browse the repository at this point in the history
  • Loading branch information
zrr1999 committed Jan 31, 2023
1 parent c4bf7e8 commit 3f8514d
Show file tree
Hide file tree
Showing 2 changed files with 42 additions and 16 deletions.
34 changes: 31 additions & 3 deletions cinn/hlir/pass/common_subexpression_elimination.cc
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,11 @@ std::unordered_set<std::string> reshape_ops = {
};

// These special attrs may have different raw values yet still be equivalent
// (e.g. a negative axis and its positive normalized form).
std::unordered_map<std::string, int> special_attrs = {{"dim", 1}, {"axis", 1}};
// Maps an attribute name to the comparison category used when deciding
// whether two ops are the same subexpression:
//   category 1: a single int axis  — compared after normalizing negative
//               values by adding ndim (see the `case 1` branch below);
//   category 2: a vector<int> of axes — compared element-wise, with each
//               element normalized for negative values (`case 2` branch).
std::unordered_map<std::string, int> special_attrs = {
// {"axis", 1}, // disabled: some ops interpret this attr inconsistently — TODO confirm before re-enabling
// {"dim", 1}, // disabled: some ops interpret this attr inconsistently — TODO confirm before re-enabling
{"axes", 2},
{"perm", 2}};

bool IsSameSubexpression(Node* op1, Node* op2, shape_dict_t& shape_dict) {
// Get the input edges for op1 and op2 in order.
Expand Down Expand Up @@ -127,10 +131,10 @@ bool IsSameSubexpression(Node* op1, Node* op2, shape_dict_t& shape_dict) {
}
auto& attr1 = attr.second;
auto& attr2 = op2->attrs.attr_store[attr.first];
auto ndim = shape_dict[op1_sink_node->id()].size();
auto ndim = static_cast<int>(shape_dict[op1_sink_node->id()].size());
if (special_attrs.count(attr.first)) {
switch (special_attrs[attr.first]) {
case 1:
case 1: {
auto op1_axis = absl::get<int>(attr1);
auto op2_axis = absl::get<int>(attr2);
if (op1_axis < 0) {
Expand All @@ -140,6 +144,30 @@ bool IsSameSubexpression(Node* op1, Node* op2, shape_dict_t& shape_dict) {
op2_axis += ndim;
}
return op2_axis == op1_axis;
}
case 2: {
auto& op1_axes = absl::get<std::vector<int>>(attr1);
auto& op2_axes = absl::get<std::vector<int>>(attr2);
auto op1_size = op1_axes.size();
auto op2_size = op2_axes.size();
if (op1_size != op2_size) {
return false;
}
for (int i = 0; i < op1_axes.size(); ++i) {
int op1_axis = op1_axes[i];
int op2_axis = op2_axes[i];
if (op1_axis < 0) {
op1_axis += ndim;
}
if (op2_axis < 0) {
op2_axis += ndim;
}
if (op2_axis != op1_axis) {
return false;
}
}
return true;
}
}
}
return attr1 == attr2;
Expand Down
24 changes: 11 additions & 13 deletions cinn/hlir/pass/common_subexpression_elimination_test.cc
Original file line number Diff line number Diff line change
Expand Up @@ -30,13 +30,10 @@

#include <memory>

#include "cinn/cinn.h"
#include "cinn/frontend/syntax.h"
#include "cinn/hlir/framework/graph.h"
#include "cinn/hlir/framework/graph_compiler.h"
#include "cinn/hlir/framework/pass.h"
#include "cinn/hlir/op/use_ops.h"
#include "cinn/hlir/pass/use_pass.h"
#include "cinn/utils/data_util.h"

DEFINE_string(model_dir, "", "");
Expand All @@ -48,17 +45,18 @@ using hlir::framework::Scope;
using utils::Join;

TEST(common_subexpression_elimination, common_subexpression_elimination_case1) {
Placeholder A(Float(32), {32, 16}, "A");
Placeholder B(Float(32), {32, 1}, "B", true);
Placeholder A(Float(32), {32, 16, 1}, "A");
Placeholder B(Float(32), {32, 1, 1}, "B", true);

Program program;
auto add_1 = program.add(A, B);
auto add_2 = program.add(B, A);
auto add = program.add(add_1, add_2);
auto t_1 = program.transpose(add, {1, 0});
auto t_2 = program.transpose(add, {1, 0});
auto t_3 = program.transpose(add, {0, 1});
auto max = program.reduce_max(add, {0}, true);
auto add_1 = program.add(A, B);
auto add_2 = program.add(B, A);
auto add = program.add(add_1, add_2);
auto t_1 = program.transpose(add, {2, 1, 0});
auto t_2 = program.transpose(add, {2, 1, 0});
auto t_3 = program.transpose(add, {2, 0, 1});
auto concat = program.concat({t_1, t_2, t_3});
auto max = program.reduce_max(concat, {0}, true);

Target target = common::DefaultTarget();
program.SetInputs({A, B});
Expand All @@ -76,7 +74,7 @@ TEST(common_subexpression_elimination, common_subexpression_elimination_case1) {
auto& prerun_instrs = runtime_program->GetPreRunInstructions();
auto& run_instrs = runtime_program->GetRunInstructions();
ASSERT_EQ(prerun_instrs.size(), 0);
ASSERT_EQ(run_instrs.size(), 5);
ASSERT_EQ(run_instrs.size(), 6);

scope->Var<hlir::framework::Tensor>("A");
scope->Var<hlir::framework::Tensor>("B");
Expand Down

0 comments on commit 3f8514d

Please sign in to comment.