Commit 2310d47

Hotfix arm cross compile (#1479)
* [ARM] support arm-linux-androideabi
* add build script
* [ARM] specify android api level
* [ARM] fix redefine error for arm32 cross-compile
* [ARM] add rt library
* update

Co-authored-by: neiltian <[email protected]>
1 parent b51b245 commit 2310d47

5 files changed, +68 -14 lines

CMakeLists.txt

Lines changed: 8 additions & 0 deletions
@@ -263,6 +263,11 @@ if(TNN_SYMBOL_HIDE AND UNIX)
     set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fvisibility=hidden -fvisibility-inlines-hidden")
 endif()
 
+if(SYSTEM.Linux AND CMAKE_SYSTEM_PROCESSOR MATCHES "arm" AND ANDROID_API_LEVAL)
+    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -D_GLIBCXX_USE_C99_MATH_TR1")
+    add_definitions(-D__ANDROID_API__=${ANDROID_API_LEVAL})
+endif()
+
 if(TNN_X86_ENABLE)
     add_subdirectory(source/tnn/device/x86)
     set(TARGET_OBJECTS ${TARGET_OBJECTS} "$<TARGET_OBJECTS:TNNX86>")
@@ -277,6 +282,9 @@ if(TNN_ARM_ENABLE)
     if(CMAKE_SYSTEM_PROCESSOR MATCHES "aarch64" OR CMAKE_SYSTEM_PROCESSOR STREQUAL "arm64")
 
     elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "arm")
+        if(SYSTEM.Linux)
+            add_definitions( -mfloat-abi=softfp )
+        endif()
         add_definitions( -mfpu=neon )
     endif()
     add_subdirectory(source/tnn/device/arm)
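
Note: the new block only takes effect for an arm Linux build configured with an ANDROID_API_LEVAL value, i.e. the androideabi cross build added by this commit, and it passes __ANDROID_API__ by hand because a standalone arm-linux-androideabi GCC driven with CMAKE_SYSTEM_NAME=Linux does not necessarily predefine that macro. A quick way to check what a particular cross compiler already predefines (toolchain name taken from the build script added below; adjust the path to your install):

    arm-linux-androideabi-gcc -dM -E - </dev/null | grep ANDROID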

platforms/linux/CMakeLists.txt

Lines changed: 5 additions & 1 deletion
@@ -11,7 +11,11 @@ include_directories(${CMAKE_CURRENT_SOURCE_DIR}/platforms/android/src)
 
 set(COMMON_SRC ${CMAKE_CURRENT_SOURCE_DIR}/platforms/android/src/test_common.cc)
 
-target_link_libraries(TNN dl rt)
+if(CMAKE_SYSTEM_PROCESSOR MATCHES "arm" AND ANDROID_API_LEVAL)
+    target_link_libraries(TNN dl log)
+else()
+    target_link_libraries(TNN dl rt)
+endif()
 
 if(TNN_X86_ENABLE)
 endif()
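
Note: swapping rt for log matches the target libc. Bionic does not ship librt as a separate library (its functions live in libc itself), so linking rt fails under the androideabi toolchain, while liblog supplies Android's logging facility. If in doubt, the produced binary's actual runtime dependencies can be inspected; the output path below is only an assumption about where this build places the shared library:

    readelf -d build_androideabi_linux/libTNN.so | grep NEEDED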

scripts/build_androideabi_linux.sh

Lines changed: 40 additions & 0 deletions
@@ -0,0 +1,40 @@
+#!/bin/bash
+
+COMPILER_PATH=${1:-"/usr"}
+
+SHARED_LIB="ON"
+ARM="ON"
+OPENMP="ON"
+OPENCL="OFF"
+RKNPU="OFF"
+API_LEVEL=18
+CC=$COMPILER_PATH/bin/arm-linux-androideabi-gcc
+CXX=$COMPILER_PATH/bin/arm-linux-androideabi-g++
+TARGET_ARCH=arm
+
+if [ -z $TNN_ROOT_PATH ]
+then
+    TNN_ROOT_PATH=$(cd `dirname $0`; pwd)/..
+fi
+
+rm -rf build_androideabi_linux
+mkdir build_androideabi_linux
+cd build_androideabi_linux
+
+cmake ${TNN_ROOT_PATH} \
+    -DCMAKE_SYSTEM_NAME=Linux \
+    -DANDROID_API_LEVAL=$API_LEVEL \
+    -DTNN_TEST_ENABLE=ON \
+    -DTNN_CPU_ENABLE=ON \
+    -DDEBUG=OFF \
+    -DCMAKE_C_COMPILER=$CC \
+    -DCMAKE_CXX_COMPILER=$CXX \
+    -DCMAKE_BUILD_TYPE=Release \
+    -DTNN_ARM_ENABLE:BOOL=$ARM \
+    -DTNN_RK_NPU_ENABLE:BOOL=$RKNPU \
+    -DTNN_OPENMP_ENABLE:BOOL=$OPENMP \
+    -DTNN_OPENCL_ENABLE:BOOL=$OPENCL \
+    -DCMAKE_SYSTEM_PROCESSOR=$TARGET_ARCH \
+    -DTNN_BUILD_SHARED:BOOL=$SHARED_LIB
+
+make -j7
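
Note: the script takes the toolchain prefix as its only argument (defaulting to /usr) and resolves the TNN root relative to its own location, so it can be run from the repository root; the toolchain path here is purely illustrative:

    ./scripts/build_androideabi_linux.sh /opt/android-standalone-toolchain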

source/tnn/layer/einsum_layer.cc

Lines changed: 12 additions & 11 deletions
@@ -16,6 +16,7 @@
 #include "tnn/layer/base_layer.h"
 #include "tnn/utils/dims_vector_utils.h"
 #include "tnn/utils/naive_compute.h"
+#include "tnn/utils/string_utils_inner.h"
 
 namespace TNN_NS {
 
@@ -110,12 +111,12 @@ Status EinsumLayer::InferOutputShape(bool ignore_error) {
 
             case '.':
                 if (found_ell) {
-                    const std::string message = "Error: einsum() found \'.\' for operand " + std::to_string(curr_op) +
+                    const std::string message = "Error: einsum() found \'.\' for operand " + ToString(curr_op) +
                                                 " for which an ellipsis was already found";
                     return Status(TNNERR_MODEL_ERR, message);
                 }
                 if (!(i + 2 < lhs.length() && lhs[++i] == '.' && lhs[++i] == '.')) {
-                    const std::string message = "einsum() found \'.\' for operand " + std::to_string(curr_op) +
+                    const std::string message = "einsum() found \'.\' for operand " + ToString(curr_op) +
                                                 " that is not part of any ellipsis";
                     return Status(TNNERR_MODEL_ERR, message);
                 }
@@ -137,7 +138,7 @@ Status EinsumLayer::InferOutputShape(bool ignore_error) {
                 // Parse label
                 if (lhs[i] < 'a' && lhs[i] > 'z') {
                     const std::string message = "einsum() operand subscript must be in range [a, z] but found " +
-                                                std::to_string(lhs[i]) + " for operand " + std::to_string(curr_op);
+                                                ToString(lhs[i]) + " for operand " + ToString(curr_op);
                     return Status(TNNERR_MODEL_ERR, message);
                 }
                 // Convert label to index in [0, 25] and store
@@ -179,10 +180,10 @@ Status EinsumLayer::InferOutputShape(bool ignore_error) {
 
         if (!(has_ellipsis ? nlabels <= ndims : nlabels == ndims)) {
             const std::string message = "einsum() the number of subscripts in the equation (" +
-                                        std::to_string(nlabels) +
+                                        ToString(nlabels) +
                                         (has_ellipsis ? ") is more than the number of dimensions ("
                                                       : ") does not match the number of dimensions (") +
-                                        std::to_string(ndims) + ") for operand " + std::to_string(i) +
+                                        ToString(ndims) + ") for operand " + ToString(i) +
                                         (has_ellipsis ? "" : " and no ellipsis was given");
 
             return Status(TNNERR_MODEL_ERR, message);
@@ -236,13 +237,13 @@ Status EinsumLayer::InferOutputShape(bool ignore_error) {
             default:
                 if (rhs[i] < 'a' && rhs[i] > 'z') {
                     const std::string message = "einsum() subscripts must be in range [a, z] but found " +
-                                                std::to_string(rhs[i]) + " for the output";
+                                                ToString(rhs[i]) + " for the output";
                     return Status(TNNERR_MODEL_ERR, message);
                 }
                 const auto label = rhs[i] - 'a';
                 if (!(label_count[label] > 0 && label_perm_index[label] == -1)) {
                     const std::string message =
-                        "einsum() output subscript " + std::to_string(rhs[i]) +
+                        "einsum() output subscript " + ToString(rhs[i]) +
                         (label_perm_index[label] > -1 ? " appears more than once in the output"
                                                       : " does not appear in the equation for any input operand");
                     return Status(TNNERR_MODEL_ERR, message);
@@ -297,10 +298,10 @@ Status EinsumLayer::InferOutputShape(bool ignore_error) {
             // Repeated label, take diagonal
             const auto dim = label_dim[label];
             if (operand_dims[j] != operand_dims[dim]) {
-                const std::string message = "einsum() subscript " + std::to_string(char(label + 'a')) +
-                                            " is repeated for operand " + std::to_string(i) +
-                                            " but the sizes don't match, " + std::to_string(operand_dims[j]) +
-                                            " != " + std::to_string(operand_dims[dim]);
+                const std::string message = "einsum() subscript " + ToString(char(label + 'a')) +
+                                            " is repeated for operand " + ToString(i) +
+                                            " but the sizes don't match, " + ToString(operand_dims[j]) +
+                                            " != " + ToString(operand_dims[dim]);
                 return Status(TNNERR_MODEL_ERR, message);
             }
             // diagonal is not supported
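
Note: the std::to_string to ToString swap is presumably motivated by the same toolchain, since std::to_string tends to be unavailable in the old gnustl/libstdc++ that ships with arm-linux-androideabi GCC. The actual helper lives in tnn/utils/string_utils_inner.h; a minimal sketch of what a stream-based ToString of this kind usually looks like (an illustration under that assumption, not the contents of the real header):

    #include <sstream>
    #include <string>

    // Illustrative stand-in for TNN's ToString: format any streamable value
    // without relying on std::to_string, which older Android gnustl lacks.
    template <typename T>
    std::string ToString(const T &value) {
        std::ostringstream stream;
        stream << value;
        return stream.str();
    }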

source/tnn/utils/blob_converter_default.cc

Lines changed: 3 additions & 2 deletions
@@ -23,6 +23,7 @@
 #include "tnn/utils/bfp16.h"
 #include "tnn/utils/bfp16_utils.h"
 #include "tnn/utils/dims_utils.h"
+#include "tnn/utils/string_utils_inner.h"
 
 namespace TNN_NS {
 
@@ -265,7 +266,7 @@ Status DefaultBlobConverterAcc::ConvertToMatAsync(Mat &image, MatConvertParam pa
         } else {
             FREE_INT8_TEMP_DATA();
             return Status(TNNERR_PARAM_ERR, "reverse type not support yet, mat type: " +
-                                                std::to_string(image.GetMatType()));
+                                                ToString(image.GetMatType()));
         }
     }
 
@@ -393,7 +394,7 @@ Status DefaultBlobConverterAcc::ConvertFromMatAsync(Mat &image_src, MatConvertPa
         } else {
             FREE_INT8_TEMP_DATA();
             return Status(TNNERR_PARAM_ERR, "reverse type not support yet, mat type: " +
-                                                std::to_string(image.GetMatType()));
+                                                ToString(image.GetMatType()));
         }
         image = reversed;
     }
