mulanxiaodingdang commented on issue #17552:
URL: https://github.com/apache/tvm/issues/17552#issuecomment-2520282138

   `/*
    * Licensed to the Apache Software Foundation (ASF) under one
    * or more contributor license agreements.  See the NOTICE file
    * distributed with this work for additional information
    * regarding copyright ownership.  The ASF licenses this file
    * to you under the Apache License, Version 2.0 (the
    * "License"); you may not use this file except in compliance
    * with the License.  You may obtain a copy of the License at
    *
    *   http://www.apache.org/licenses/LICENSE-2.0
    *
    * Unless required by applicable law or agreed to in writing,
    * software distributed under the License is distributed on an
    * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
    * KIND, either express or implied.  See the License for the
    * specific language governing permissions and limitations
    * under the License.
    */
   
   /*!
     * \brief Example code to load and run a TVM module.
    * \file cpp_deploy.cc
    */
   #undef LOG
   #undef LOG_FATAL
   #undef LOG_INFO
   #undef LOG_ERROR
   #undef LOG_WARNING
   #undef CHECK
   #undef CHECK_LT
   #undef CHECK_GT
   #undef CHECK_LE
   #undef CHECK_GE
   #undef CHECK_EQ
   #undef CHECK_NE
   #undef CHECK_NOTNULL
   #undef LOG_IF
   #undef LOG_DFATAL
   #undef DFATAL
   #undef DLOG
   #undef DLOG_IF
   #undef VLOG
   
   #include <dlpack/dlpack.h>
   #include <tvm/runtime/module.h>
   #include <tvm/runtime/packed_func.h>
   #include <tvm/runtime/registry.h>
   
    #include <cstdio>
    #include <cstdlib>  // rand(), RAND_MAX
   
   void DeployGraphExecutor() {
     LOG(INFO) << "Running graph executor...";
     // load in the library
     DLDevice dev{kDLCPU, 0};
      tvm::runtime::Module mod_factory =
          tvm::runtime::Module::LoadFromFile("/mnt/hgfs/results/x86/simple.so");
     LOG(INFO) << "Successfully loading in the library";
     // create the graph executor module
     tvm::runtime::Module gmod = mod_factory.GetFunction("simple")(dev);
     tvm::runtime::PackedFunc set_input = gmod.GetFunction("input");
     tvm::runtime::PackedFunc get_output = gmod.GetFunction("output");
     tvm::runtime::PackedFunc run = gmod.GetFunction("run");
     
      if (set_input == nullptr) {
        LOG(ERROR) << "Failed to get function 'input' from the module.";
        // return;
      }
      // Use the C++ API
      // Prepare the input tensor
      tvm::runtime::NDArray input =
          tvm::runtime::NDArray::Empty({1, 1, 28, 28}, DLDataType{kDLFloat, 32, 1}, dev);
      for (int i = 0; i < 1 * 1 * 28 * 28; ++i) {
        float random_value = (static_cast<float>(rand()) / RAND_MAX) * 2 - 1;
        static_cast<float*>(input->data)[i] = random_value;
      }

      LOG(INFO) << "Input shape: " << input->shape << ", dtype: " << input->dtype;
      LOG(INFO) << "Device type: " << dev.device_type << ", device id: " << dev.device_id;

      // Prepare the output tensor
      tvm::runtime::NDArray output =
          tvm::runtime::NDArray::Empty({1, 10}, DLDataType{kDLFloat, 32, 1}, dev);
      // Set the input (make sure the input name matches the model)
      set_input("input", input);
      // Run the model
      run();
      // Get the output
      get_output("output", output);
      // Print the output
      for (int i = 0; i < 10; ++i) {
        printf("Output[%d]: %f\n", i, static_cast<float*>(output->data)[i]);
      }
   }
   
   int main(void) {
     //DeploySingleOp();
     DeployGraphExecutor();
     return 0;
    }`

    I compiled the code with:

    g++ -g simple_compile.cpp -o compile_simple -I/tvm/include -L/tvm/build -I/tvm/lib -I/tvm/3rdparty/dmlc-core/include -I/tvm/3rdparty/dlpack/include -ltvm_runtime -lpthread -ldl -lbacktrace

    Running the resulting binary under gdb, it crashes with:

    Program received signal SIGSEGV, Segmentation fault.
    0x000055555555c1df in tvm::runtime::PackedFuncObj::CallPacked (rv=0x7fffffffdb30, args=..., this=0x0) at /tvm/include/tvm/runtime/packed_func.h:1398
    1398   (*f_call_packed_)(this, args, rv);
   

