Add compilation options for datatransform dependencies in dlr_relayvm.cc (#334)

* add compilation options for datatransform dependencies in dlr_relayvm.cc

* clang formatting include/dlr_relayvm.h

* compiler option for turning on/off dlr_pipeline_skl_xgb_test

* Enable datatransform in CI

* missing =ON in CMAKE for Jenkins

* Add missing compilation flag to CMAKE_CXX_FLAGS

* typo fix

* Remove debugging message
CloudManX authored and minlu1021 committed Mar 18, 2021
1 parent 11ab4ee commit f4b6214
Showing 4 changed files with 60 additions and 6 deletions.
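All four files implement the same compile-time gate: src/dlr_data_transform.cc and its tests are only compiled when the build is configured with -DENABLE_DATATRANSFORM=ON, CMake forwards that choice to the compiler via add_definitions(-DENABLE_DATATRANSFORM), and every reference to the data-transform header and member in dlr_relayvm.h / dlr_relayvm.cc is wrapped in #ifdef ENABLE_DATATRANSFORM. The following is a minimal, self-contained sketch of that pattern; Model, DataTransform, and NumTransformOps are stand-ins invented for illustration, not DLR types.

#include <cstddef>
#include <iostream>
#include <string>

#ifdef ENABLE_DATATRANSFORM
#include <vector>                // stands in for "dlr_data_transform.h"
struct DataTransform {           // stand-in for the real transform class
  std::vector<std::string> ops{"example-op-1", "example-op-2"};
};
#endif

class Model {
 public:
  std::size_t NumTransformOps() const {
#ifdef ENABLE_DATATRANSFORM
    return transform_.ops.size();
#else
    return 0;                    // feature is compiled out entirely
#endif
  }

 private:
#ifdef ENABLE_DATATRANSFORM
  DataTransform transform_;      // member exists only when the flag is set
#endif
};

int main() {
  // Prints 2 when compiled with -DENABLE_DATATRANSFORM, 0 otherwise.
  std::cout << Model{}.NumTransformOps() << " transform op(s) compiled in\n";
  return 0;
}

Guarding the member as well as the include is what keeps the header usable in builds that never see dlr_data_transform.h.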
11 changes: 9 additions & 2 deletions CMakeLists.txt
@@ -118,7 +118,7 @@ FILE(GLOB DLR_SRC
)

if(NOT(ENABLE_DATATRANSFORM))
list(REMOVE_ITEM DLR_SRC ${CMAKE_CURRENT_SOURCE_DIR}/src/dlr_data_trasform.cc)
list(REMOVE_ITEM DLR_SRC ${CMAKE_CURRENT_SOURCE_DIR}/src/dlr_data_transform.cc)
endif()

if(USE_OPENCL)
@@ -233,6 +233,11 @@ if(WITH_HEXAGON)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -DDLR_HEXAGON")
list(APPEND DLR_SRC "src/dlr_hexagon/dlr_hexagon.cc")
endif()

if(ENABLE_DATATRANSFORM)
add_definitions(-DENABLE_DATATRANSFORM)
endif()

if(AAR_BUILD)
list(APPEND DLR_SRC "src/jni/dlr_jni.cc")
endif()
@@ -307,7 +312,9 @@ if(NOT(AAR_BUILD))
file(GLOB TEST_SRCS tests/cpp/*.cc)

if(NOT(ENABLE_DATATRANSFORM))
list(REMOVE_ITEM TEST_SRCS ${CMAKE_CURRENT_SOURCE_DIR}/tests/cpp/dlr_data_transform_test.cc)
list(REMOVE_ITEM TEST_SRCS
${CMAKE_CURRENT_SOURCE_DIR}/tests/cpp/dlr_data_transform_test.cc
${CMAKE_CURRENT_SOURCE_DIR}/tests/cpp/dlr_pipeline_skl_xgb_test.cc)
endif()

if(WITH_HEXAGON)
2 changes: 1 addition & 1 deletion Jenkinsfile
@@ -91,7 +91,7 @@ pipeline {
sh """
mkdir -p build
cd build
cmake .. && make -j16
cmake -DENABLE_DATATRANSFORM=ON .. && make -j16
CTEST_OUTPUT_ON_FAILURE=TRUE make test
cd ..
tests/ci_build/create_wheel.sh manylinux1_x86_64
7 changes: 7 additions & 0 deletions include/dlr_relayvm.h
@@ -11,7 +11,10 @@
#include <tvm/runtime/vm/vm.h>

#include "dlr_common.h"

#ifdef ENABLE_DATATRANSFORM
#include "dlr_data_transform.h"
#endif

#ifdef _WIN32
#define LIBEXT ".dll"
@@ -43,7 +46,11 @@ class DLR_DLL RelayVMModel : public DLRModel {
tvm::runtime::ObjectRef output_ref_;
std::vector<tvm::runtime::NDArray> outputs_;
std::vector<std::vector<int64_t>> output_shapes_;

#ifdef ENABLE_DATATRANSFORM
DataTransform data_transform_;
#endif

void SetupVMModule(const std::vector<std::string>& paths);
void SetupVMModule(const std::vector<DLRModelElem>& model_elems);
void FetchInputNodesData();
46 changes: 43 additions & 3 deletions src/dlr_relayvm.cc
@@ -149,17 +149,23 @@ void RelayVMModel::FetchOutputNodesData() {
}

const char* RelayVMModel::GetInputName(int index) const {
#ifdef ENABLE_DATATRANSFORM
if (HasMetadata() && data_transform_.HasInputTransform(metadata_)) {
return "input";
}
#endif

CHECK_LT(index, num_inputs_) << "Input index is out of range.";
return input_names_[index].c_str();
}

const char* RelayVMModel::GetInputType(int index) const {
#ifdef ENABLE_DATATRANSFORM
if (HasMetadata() && data_transform_.HasInputTransform(metadata_)) {
return "json";
}
#endif

CHECK_LT(index, num_inputs_) << "Input index is out of range.";
return input_types_[index].c_str();
}
@@ -171,10 +177,13 @@ std::vector<std::string> RelayVMModel::GetWeightNames() const {
}

void RelayVMModel::GetInput(const char* name, void* input) {
#ifdef ENABLE_DATATRANSFORM
if (HasMetadata() && data_transform_.HasInputTransform(metadata_)) {
LOG(WARNING) << "GetInput is not supported for this model.";
return;
}
#endif

int index = GetInputIndex(name);
auto in_array = inputs_[index];
DLTensor input_tensor;
@@ -189,9 +198,12 @@ void RelayVMModel::GetInput(const char* name, void* input) {
}

int RelayVMModel::GetInputIndex(const char* name) const {
#ifdef ENABLE_DATATRANSFORM
if (HasMetadata() && data_transform_.HasInputTransform(metadata_)) {
return 0;
}
#endif

std::string input_name(name);
for (auto i = 0; i < num_inputs_; i++) {
if (input_name == input_names_[i]) {
@@ -263,7 +275,8 @@ DLDataType RelayVMModel::GetInputDLDataType(int index) {
}

void RelayVMModel::SetInput(const char* name, const int64_t* shape, const void* input, int dim) {
// Handle string input.
// Handle string input.
#ifdef ENABLE_DATATRANSFORM
if (HasMetadata() && data_transform_.HasInputTransform(metadata_)) {
std::vector<DLDataType> dtypes;
for (size_t i = 0; i < num_inputs_; ++i) {
@@ -272,6 +285,8 @@ void RelayVMModel::SetInput(const char* name, const int64_t* shape, const void*
data_transform_.TransformInput(metadata_, shape, input, dim, dtypes, ctx_, &inputs_);
return;
}
#endif

int index = GetInputIndex(name);
DLDataType dtype = GetInputDLDataType(index);
DLTensor input_tensor;
@@ -290,7 +305,8 @@ void RelayVMModel::SetInput(const char* name, const int64_t* shape, const void*
}

void RelayVMModel::SetInputTensor(const char* name, DLTensor* tensor) {
// Handle string input.
// Handle string input.
#ifdef ENABLE_DATATRANSFORM
if (HasMetadata() && data_transform_.HasInputTransform(metadata_)) {
std::vector<DLDataType> dtypes;
for (size_t i = 0; i < num_inputs_; ++i) {
@@ -300,6 +316,7 @@ void RelayVMModel::SetInputTensor(const char* name, DLTensor* tensor) {
ctx_, &inputs_);
return;
}
#endif

int index = GetInputIndex(name);
if (index > -1) {
@@ -348,21 +365,26 @@ void RelayVMModel::UpdateOutputs() {
} else {
throw dmlc::Error("Invalid output_ref format!");
}
// Apply DataTransform if needed.
// Apply DataTransform if needed.
#ifdef ENABLE_DATATRANSFORM
for (size_t i = 0; i < outputs_.size(); ++i) {
if (HasMetadata() && data_transform_.HasOutputTransform(metadata_, i)) {
data_transform_.TransformOutput(metadata_, i, outputs_[i]);
}
}
#endif
}

void RelayVMModel::GetOutput(int index, void* output) {
CHECK_LT(index, num_outputs_) << "Output index is out of range.";
auto out_array = outputs_[index];
#ifdef ENABLE_DATATRANSFORM
if (HasMetadata() && data_transform_.HasOutputTransform(metadata_, index)) {
data_transform_.GetOutput(index, output);
return;
}
#endif

DLTensor output_tensor;
output_tensor.data = output;
output_tensor.ctx = DLContext{DLDeviceType::kDLCPU, 0};
@@ -376,36 +398,45 @@ void RelayVMModel::GetOutput(int index, void* output) {

const void* RelayVMModel::GetOutputPtr(int index) const {
CHECK_LT(index, num_outputs_) << "Output index is out of range.";
#ifdef ENABLE_DATATRANSFORM
if (HasMetadata() && data_transform_.HasOutputTransform(metadata_, index)) {
return data_transform_.GetOutputPtr(index);
}
#endif

return outputs_[index]->data;
}

void RelayVMModel::GetOutputManagedTensorPtr(int index, const DLManagedTensor** out) {
CHECK_LT(index, num_outputs_) << "Output index is out of range.";
auto out_array = outputs_[index];
#ifdef ENABLE_DATATRANSFORM
CHECK(!(HasMetadata() && data_transform_.HasOutputTransform(metadata_, index)))
<< "Output transforms are not supported with GetOutputManagedTensor.";
#endif
*out = out_array.ToDLPack();
}

void RelayVMModel::GetOutputTensor(int index, DLTensor* out) {
CHECK_LT(index, num_outputs_) << "Output index is out of range.";
auto out_array = outputs_[index];
#ifdef ENABLE_DATATRANSFORM
if (HasMetadata() && data_transform_.HasOutputTransform(metadata_, index)) {
data_transform_.GetOutput(index, out->data);
return;
}
#endif
out_array.CopyTo(out);
}

void RelayVMModel::GetOutputShape(int index, int64_t* shape) const {
CHECK_LT(index, num_outputs_) << "Output index is out of range.";
#ifdef ENABLE_DATATRANSFORM
if (HasMetadata() && data_transform_.HasOutputTransform(metadata_, index)) {
data_transform_.GetOutputShape(index, shape);
return;
}
#endif
if (outputs_.empty()) {
// Inference has not been called yet. Get shapes from metadata.
CHECK_LT(index, output_shapes_.size()) << "Output index is out of range.";
@@ -418,10 +449,13 @@ void RelayVMModel::GetOutputShape(int index, int64_t* shape) const {

void RelayVMModel::GetOutputSizeDim(int index, int64_t* size, int* dim) {
CHECK_LT(index, output_shapes_.size()) << "Output index is out of range.";
#ifdef ENABLE_DATATRANSFORM
if (HasMetadata() && data_transform_.HasOutputTransform(metadata_, index)) {
data_transform_.GetOutputSizeDim(index, size, dim);
return;
}
#endif

*size = 1;
if (index < outputs_.size()) {
auto arr = outputs_[index];
@@ -444,9 +478,12 @@ void RelayVMModel::GetOutputSizeDim(int index, int64_t* size, int* dim) {

const char* RelayVMModel::GetOutputType(int index) const {
CHECK_LT(index, num_outputs_) << "Output index is out of range.";
#ifdef ENABLE_DATATRANSFORM
if (HasMetadata() && data_transform_.HasOutputTransform(metadata_, index)) {
return "json";
}
#endif

return output_types_[index].c_str();
}

@@ -482,8 +519,11 @@ void RelayVMModel::GetOutputByName(const char* name, void* out) {
}

int RelayVMModel::GetNumInputs() const {
#ifdef ENABLE_DATATRANSFORM
if (HasMetadata() && data_transform_.HasInputTransform(metadata_)) {
return 1;
}
#endif

return num_inputs_;
}
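
Taken together, the guarded branches in GetNumInputs, GetInputName, GetInputType, and SetInput mean that a model whose metadata declares an input transform presents a single JSON input to callers, but only in builds configured with ENABLE_DATATRANSFORM. Below is a rough, stand-alone sketch of that caller-visible difference; FakeModel and its fields are invented for illustration and are not part of the DLR API, and "float32" is only a placeholder for whatever type the underlying model reports.

#include <cstdio>

// FakeModel mimics the guarded branches in RelayVMModel above so the
// behavioral difference can be observed outside the DLR codebase.
struct FakeModel {
  bool has_input_transform;  // driven by model metadata in the real code
  int num_inputs = 3;        // e.g. three dense feature tensors

  int GetNumInputs() const {
#ifdef ENABLE_DATATRANSFORM
    if (has_input_transform) return 1;  // the transform consumes one JSON document
#endif
    return num_inputs;
  }

  const char* GetInputType(int /*index*/) const {
#ifdef ENABLE_DATATRANSFORM
    if (has_input_transform) return "json";
#endif
    return "float32";  // placeholder for the model's real input type
  }
};

int main() {
  FakeModel m{/*has_input_transform=*/true};
  // With -DENABLE_DATATRANSFORM: "1 input(s) of type json"
  // Without the flag:            "3 input(s) of type float32"
  std::printf("%d input(s) of type %s\n", m.GetNumInputs(), m.GetInputType(0));
  return 0;
}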
