From d2331be5fff65cff8eafc3f379b3fd5279f188d8 Mon Sep 17 00:00:00 2001
From: Louis Jean
Date: Mon, 29 Apr 2024 14:38:49 +0000
Subject: [PATCH] feat(torch): add JIT FusionStrategy selection

---
 src/backends/torch/torchlib.cc | 14 +++++++++++-
 src/dto/mllib.hpp              | 42 ++++++++++++++++++++++++++++++++++
 2 files changed, 55 insertions(+), 1 deletion(-)

diff --git a/src/backends/torch/torchlib.cc b/src/backends/torch/torchlib.cc
index 9ba706019..419969348 100644
--- a/src/backends/torch/torchlib.cc
+++ b/src/backends/torch/torchlib.cc
@@ -30,6 +30,7 @@
 #include
 #include
 #include
+#include
 
 #include "native/native.h"
 #include "torchsolver.h"
@@ -188,9 +189,20 @@ namespace dd
 
   void TorchLib::init_mllib(const APIData &lib_ad)
   {
-    // Get parameters
     auto mllib_dto = lib_ad.createSharedDTO();
 
+    // experimental: jit compiler parameters
+    if (mllib_dto->jit_compiler_params)
+      {
+        auto jit_params = mllib_dto->jit_compiler_params;
+        torch::jit::FusionStrategy strat = {
+          { torch::jit::FusionBehavior::STATIC, jit_params->fusion_static },
+          { torch::jit::FusionBehavior::DYNAMIC, jit_params->fusion_dynamic }
+        };
+        auto old_strat = torch::jit::setFusionStrategy(strat);
+      }
+
+    // publish model (from existing repository)
     if (mllib_dto->from_repository != nullptr)
       {
         this->_mlmodel.copy_to_target(mllib_dto->from_repository,
diff --git a/src/dto/mllib.hpp b/src/dto/mllib.hpp
index b6c118525..a1b70d741 100644
--- a/src/dto/mllib.hpp
+++ b/src/dto/mllib.hpp
@@ -45,6 +45,38 @@ namespace dd
     DTO_FIELD(Int32, test_batch_size) = 1;
   };
 
+  // Torch JIT compiler
+  /** These parameters are set using the function setFusionStrategy:
+   * https://github.com/pytorch/pytorch/blob/4c074a9b8bd2e6d8940b40a41ce399e6c4a463a9/torch/csrc/jit/runtime/profiling_graph_executor_impl.cpp#L131
+   */
+  class JitCompilerParameters : public oatpp::DTO
+  {
+    DTO_INIT(JitCompilerParameters, DTO)
+
+    DTO_FIELD_INFO(fusion_static)
+    {
+      info->description
+          = "Number of STATIC specializations that can occur when "
+            "compiling jit models. In STATIC fusion, fused ops are compiled "
+            "to have fixed shapes. See "
+            "https://pytorch.org/docs/stable/generated/"
+            "torch.jit.set_fusion_strategy.html for more details.";
+    }
+    DTO_FIELD(Int32, fusion_static) = 2;
+
+    DTO_FIELD_INFO(fusion_dynamic)
+    {
+      info->description
+          = "Number of DYNAMIC specializations that can occur when "
+            "compiling jit models. In DYNAMIC fusion, fused ops are "
+            "compiled to have variable input shapes, so that the same "
+            "specialization can be used with multiple shapes. See "
+            "https://pytorch.org/docs/stable/generated/"
+            "torch.jit.set_fusion_strategy.html for more details.";
+    }
+    DTO_FIELD(Int32, fusion_dynamic) = 10;
+  };
+
   class MLLib : public oatpp::DTO
   {
     DTO_INIT(MLLib, DTO /* extends */)
@@ -191,6 +223,16 @@ namespace dd
     }
     DTO_FIELD(Boolean, concurrent_predict) = true;
 
+    DTO_FIELD_INFO(jit_compiler_params)
+    {
+      info->description
+          = "[experimental] Modify torch jit compiler parameters. This can "
+            "be useful, for example, when the model takes too long to "
+            "compile. Beware: this parameter takes effect immediately for "
+            "all dede services.";
+    }
+    DTO_FIELD(Object<JitCompilerParameters>, jit_compiler_params);
+
     // Libtorch predict options
     DTO_FIELD_INFO(forward_method)
     {
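
For reference, below is a minimal standalone sketch (not part of the patch) of what the new init_mllib branch does with libtorch: it builds a FusionStrategy from the two integers exposed by jit_compiler_params and applies it process-wide before any TorchScript module is compiled. The header path, the setFusionStrategy signature, and the "model.pt" path are assumptions for the sake of illustration and may differ across libtorch versions; setFusionStrategy is the function linked in the DTO comment above.

// Sketch only: assumes libtorch is on the include path and that
// setFusionStrategy/FusionStrategy are declared in
// torch/csrc/jit/runtime/graph_executor.h (check your libtorch version).
#include <torch/script.h>
#include <torch/csrc/jit/runtime/graph_executor.h>

int main()
{
  // Equivalent of jit_compiler_params: { fusion_static: 2, fusion_dynamic: 10 }
  torch::jit::FusionStrategy strat
      = { { torch::jit::FusionBehavior::STATIC, 2 },
          { torch::jit::FusionBehavior::DYNAMIC, 10 } };

  // The setting is process-wide, which is why the DTO description warns that
  // it affects every dede service at once. The previous strategy is returned
  // and could be restored later if needed.
  torch::jit::FusionStrategy old_strat = torch::jit::setFusionStrategy(strat);
  (void)old_strat;

  // Modules loaded after this point are profiled and fused with the new
  // specialization limits. "model.pt" is a placeholder path.
  torch::jit::Module module = torch::jit::load("model.pt");
  return 0;
}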