Add quantized models to the x86_64 and riscv benchmark suites. (iree-org#10054)
hanhanW authored Aug 11, 2022
1 parent faf7a75 commit d32f7be
Showing 3 changed files with 31 additions and 0 deletions.
14 changes: 14 additions & 0 deletions benchmarks/TFLite/CMakeLists.txt
@@ -161,6 +161,20 @@ set(PERSON_DETECT_INT8_MODULE
"1x96x96x1xi8"
)

set(EFFICIENTNET_INT8_MODULE
NAME
"EfficientNet"
TAGS
"int8"
SOURCE
# Mirror of https://tfhub.dev/tensorflow/lite-model/efficientnet/lite0/int8/2
"https://storage.googleapis.com/iree-model-artifacts/efficientnet_lite0_int8_2.tflite"
ENTRY_FUNCTION
"main"
FUNCTION_INPUTS
"1x224x224x3xui8"
)

################################################################################
# Add benchmarks for all platforms. #
################################################################################
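As a reference for future additions, the sketch below mirrors the set() pattern used above for EFFICIENTNET_INT8_MODULE. The module name, artifact URL, and input signature are placeholders for illustration only, not real entries in iree-model-artifacts.

set(SOME_MODEL_INT8_MODULE          # Hypothetical module; placeholder values throughout.
  NAME
    "SomeModel"                     # Human-readable model name.
  TAGS
    "int8"                          # Tags the module as a quantized (int8) variant.
  SOURCE
    # Placeholder URL; real entries point at a mirrored TFLite artifact.
    "https://storage.googleapis.com/iree-model-artifacts/<some_model>_int8.tflite"
  ENTRY_FUNCTION
    "main"                          # Function in the module to benchmark.
  FUNCTION_INPUTS
    "1x224x224x3xui8"               # Input shape and element type.
)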
5 changes: 5 additions & 0 deletions benchmarks/TFLite/linux-riscv.cmake
@@ -45,6 +45,9 @@ iree_benchmark_suite(
"${DEEPLABV3_FP32_MODULE}"
"${MOBILEBERT_FP32_MODULE}"
"${MOBILENET_V1_MODULE}"
"${MOBILEBERT_INT8_MODULE}"
"${PERSON_DETECT_INT8_MODULE}"
"${EFFICIENTNET_INT8_MODULE}"

BENCHMARK_MODES
"full-inference,default-flags"
@@ -70,7 +73,9 @@ iree_benchmark_suite(
"linux-riscv"

MODULES
"${MOBILEBERT_INT8_MODULE}"
"${PERSON_DETECT_INT8_MODULE}"
"${EFFICIENTNET_INT8_MODULE}"

BENCHMARK_MODES
"full-inference,default-flags"
12 changes: 12 additions & 0 deletions benchmarks/TFLite/linux-x86_64.cmake
@@ -34,6 +34,9 @@ iree_benchmark_suite(
"${MOBILEBERT_FP32_MODULE}"
"${MOBILENET_V2_MODULE}"
"${MOBILENET_V3SMALL_MODULE}"
"${MOBILEBERT_INT8_MODULE}"
"${PERSON_DETECT_INT8_MODULE}"
"${EFFICIENTNET_INT8_MODULE}"

BENCHMARK_MODES
"full-inference,default-flags"
@@ -63,6 +66,9 @@ iree_benchmark_suite(
"${MOBILEBERT_FP32_MODULE}"
"${MOBILENET_V2_MODULE}"
"${MOBILENET_V3SMALL_MODULE}"
"${MOBILEBERT_INT8_MODULE}"
"${PERSON_DETECT_INT8_MODULE}"
"${EFFICIENTNET_INT8_MODULE}"

BENCHMARK_MODES
"1-thread,full-inference,default-flags"
@@ -94,6 +100,9 @@ iree_benchmark_suite(
"${MOBILEBERT_FP32_MODULE}"
"${MOBILENET_V2_MODULE}"
"${MOBILENET_V3SMALL_MODULE}"
"${MOBILEBERT_INT8_MODULE}"
"${PERSON_DETECT_INT8_MODULE}"
"${EFFICIENTNET_INT8_MODULE}"

BENCHMARK_MODES
"4-thread,full-inference,default-flags"
@@ -125,6 +134,9 @@ iree_benchmark_suite(
"${MOBILEBERT_FP32_MODULE}"
"${MOBILENET_V2_MODULE}"
"${MOBILENET_V3SMALL_MODULE}"
"${MOBILEBERT_INT8_MODULE}"
"${PERSON_DETECT_INT8_MODULE}"
"${EFFICIENTNET_INT8_MODULE}"

BENCHMARK_MODES
"8-thread,full-inference,default-flags"
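Each platform file then registers the module variables with iree_benchmark_suite(), as the linux-riscv.cmake and linux-x86_64.cmake hunks above show: the new int8 modules are appended to the MODULES list of an existing call. A minimal sketch of that registration step is below; the call takes further arguments (target backend, compilation flags, and so on) that fall outside the diff context and are left elided here.

iree_benchmark_suite(
  # ... platform, backend, and flag arguments elided (not visible in the diff) ...

  MODULES
    "${MOBILEBERT_INT8_MODULE}"
    "${PERSON_DETECT_INT8_MODULE}"
    "${EFFICIENTNET_INT8_MODULE}"

  BENCHMARK_MODES
    "full-inference,default-flags"

  # ... remaining arguments elided ...
)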
