diff --git a/src/gallium/targets/teflon/test_teflon.cpp b/src/gallium/targets/teflon/test_teflon.cpp
index 86f85b59fc5..46f45c09cb5 100644
--- a/src/gallium/targets/teflon/test_teflon.cpp
+++ b/src/gallium/targets/teflon/test_teflon.cpp
@@ -25,8 +25,10 @@
 #define TEST_ADD 1
 #define TEST_FULLY_CONNECTED 1
 #define TEST_MODELS 1
+#define TEST_COMPILED_MODELS 1
 
 #define TOLERANCE 8
+#define COMPILED_MODELS_TOLERANCE 12
 
 std::vector<bool> is_signed{false}; /* TODO: Support INT8? */
 std::vector<bool> padding_same{false, true};
@@ -41,7 +43,7 @@ std::vector<int> fc_channels{23, 46, 128, 256, 512};
 std::vector<int> fc_size{128, 1280, 25088, 62720};
 
 static void
-test_model(void *buf, size_t buf_size, std::string cache_dir, unsigned tolerance)
+run_and_validate(TfLiteModel *cpu_model, TfLiteModel *npu_model, std::string cache_dir, unsigned tolerance)
 {
    void **input = NULL;
    size_t num_inputs;
@@ -51,11 +53,8 @@ test_model(void *buf, size_t buf_size, std::string cache_dir, unsigned tolerance
    size_t num_outputs;
    void **npu_output;
 
-   TfLiteModel *model = TfLiteModelCreate(buf, buf_size);
-   assert(model);
-
-   run_model(model, EXECUTOR_CPU, &input, &num_inputs, &cpu_output, &output_sizes, &output_types, &num_outputs, cache_dir);
-   run_model(model, EXECUTOR_NPU, &input, &num_inputs, &npu_output, &output_sizes, &output_types, &num_outputs, cache_dir);
+   run_model(cpu_model, EXECUTOR_CPU, &input, &num_inputs, &cpu_output, &output_sizes, &output_types, &num_outputs, cache_dir);
+   run_model(npu_model, EXECUTOR_NPU, &input, &num_inputs, &npu_output, &output_sizes, &output_types, &num_outputs, cache_dir);
 
    const char *dump_output = os_get_option("TEFLON_DUMP_OUTPUT");
    if (dump_output && atoi(dump_output) == 1) {
@@ -221,6 +220,15 @@ test_model(void *buf, size_t buf_size, std::string cache_dir, unsigned tolerance
 
    free(output_sizes);
    free(output_types);
+}
+
+static void
+test_model(void *buf, size_t buf_size, std::string cache_dir, unsigned tolerance)
+{
+   TfLiteModel *model = TfLiteModelCreate(buf, buf_size);
+   assert(model);
+
+   run_and_validate(model, model, cache_dir, tolerance);
 
    TfLiteModelDelete(model);
 }
@@ -249,6 +257,57 @@ test_model_file(std::string file_name, unsigned tolerance, bool use_cache)
    close(model_fd);
 }
 
+static void
+test_compiled_model(void *buf, size_t buf_size, void *cbuf, size_t cbuf_size,
+                    std::string cache_dir, unsigned tolerance)
+{
+   TfLiteModel *cpu_model = TfLiteModelCreate(buf, buf_size);
+   TfLiteModel *npu_model = TfLiteModelCreate(cbuf, cbuf_size);
+
+   assert(cpu_model);
+   assert(npu_model);
+
+   run_and_validate(cpu_model, npu_model, cache_dir, tolerance);
+
+   TfLiteModelDelete(cpu_model);
+   TfLiteModelDelete(npu_model);
+}
+
+static void
+test_compiled_model_file(std::string file_name, std::string compiled_file_name,
+                         unsigned tolerance, bool use_cache)
+{
+   std::ostringstream cache_dir;
+
+   if (use_cache) {
+      auto path = std::filesystem::path(file_name);
+      cache_dir << "/var/cache/teflon_tests/";
+      cache_dir << path.parent_path().filename().string();
+      cache_dir << "_";
+      cache_dir << path.stem().string();
+   }
+
+   srand(4);
+
+   struct stat sb, csb;
+   int model_fd = open(file_name.c_str(), O_RDONLY);
+   int cmodel_fd = open(compiled_file_name.c_str(), O_RDONLY);
+
+   fstat(model_fd, &sb);
+   fstat(cmodel_fd, &csb);
+
+   void *model_data = mmap(0, sb.st_size, PROT_READ, MAP_PRIVATE, model_fd, 0);
+   void *cmodel_data = mmap(0, csb.st_size, PROT_READ, MAP_PRIVATE, cmodel_fd, 0);
+
+   test_compiled_model(model_data, sb.st_size, cmodel_data, csb.st_size,
+                       cache_dir.str(), tolerance);
+
+   munmap(model_data, sb.st_size);
+   munmap(cmodel_data, csb.st_size);
+   close(model_fd);
+   close(cmodel_fd);
+}
+
 void
 test_conv(int input_size, int weight_size, int input_channels, int output_channels,
           int stride, bool padding_same, bool is_signed, bool depthwise, int seed)
@@ -620,6 +679,66 @@ INSTANTIATE_TEST_SUITE_P(
 
 #endif
 
+#if TEST_COMPILED_MODELS
+
+class CompiledModels : public testing::TestWithParam<std::string> {};
+
+TEST_P(CompiledModels, Op)
+{
+   std::ostringstream file_path, cfile_path;
+   auto test_name = GetParam();
+   test_name.replace(test_name.find("_"), 1, "/");
+   assert(os_get_option("TEFLON_TEST_DATA"));
+   file_path << os_get_option("TEFLON_TEST_DATA") << "/models/" << test_name << ".tflite";
+   cfile_path << os_get_option("TEFLON_TEST_DATA") << "/compiled_models/" << test_name << ".tflite";
+
+   test_compiled_model_file(file_path.str(), cfile_path.str(), COMPILED_MODELS_TOLERANCE, true);
+}
+
+std::vector<std::string>
+get_compiled_model_files(void)
+{
+   assert(os_get_option("TEFLON_TEST_DATA"));
+   std::stringstream dir, cdir;
+   dir << os_get_option("TEFLON_TEST_DATA") << "/models";
+   cdir << os_get_option("TEFLON_TEST_DATA") << "/compiled_models";
+
+   std::vector<std::string> paths;
+   std::filesystem::recursive_directory_iterator b(dir.str());
+   for (auto const &f : b) {
+      if (f.path().extension() != ".tflite")
+         continue;
+
+      std::filesystem::path relative_path = std::filesystem::relative(f.path(), dir.str());
+      std::filesystem::path compiled_path = std::filesystem::path(cdir.str()) / relative_path;
+      if (!std::filesystem::exists(compiled_path))
+         continue;
+
+      std::stringstream path;
+      path << f.path().parent_path().filename().string();
+      path << "_" << f.path().stem().string();
+      paths.push_back(path.str());
+   }
+
+   std::sort(paths.begin(), paths.end());
+
+   return paths;
+}
+
+static inline std::string
+CompiledModelsTestCaseName(
+   const testing::TestParamInfo<std::string> &info)
+{
+   return info.param;
+}
+
+INSTANTIATE_TEST_SUITE_P(
+   , CompiledModels,
+   ::testing::ValuesIn(get_compiled_model_files()),
+   CompiledModelsTestCaseName);
+
+#endif
+
 int
 main(int argc, char **argv)
 {
@@ -656,6 +775,8 @@ main(int argc, char **argv)
       return 0;
    } else if (argc > 1 && !strcmp(argv[1], "run_model")) {
       test_model_file(std::string(argv[2]), TOLERANCE, false);
+   } else if (argc > 1 && !strcmp(argv[1], "run_compiled_model")) {
+      test_compiled_model_file(std::string(argv[2]), std::string(argv[3]), COMPILED_MODELS_TOLERANCE, false);
    } else {
       testing::InitGoogleTest(&argc, argv);
       return RUN_ALL_TESTS();