diff --git a/Cargo.toml b/Cargo.toml
index 95c605c..680a7df 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,12 +1,12 @@
 [package]
 name = "mmdeploy-sys"
-version = "1.0.0"
+version = "1.1.0"
 edition = "2021"
 license = "Apache-2.0"
 description = "FFI bindings to MMDeploy"
 repository = "https://github.com/liu-mengyang/rust-mmdeploy-sys"
 keywords = ["mmdeploy", "deployment"]
-
+authors = ["Liu Mengyang","VanderBieu"]
 
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 
diff --git a/README.md b/README.md
index 086b3b1..e07895c 100644
--- a/README.md
+++ b/README.md
@@ -2,7 +2,7 @@
 
 MMDeploy bindings for Rust.
 
-This repo is a low-level MMDeploy abstraction, you can use [rust-mmdeploy](https://github.com/liu-mengyang/rust-mmdeploy) directly if you want to use MMDeploy in Rust. If it cannot cover your requirements, it's easy to build high-level abstraction based on this repo.
+This repo is a low-level abstraction of MMDeploy>=1.0.0. You can use [rust-mmdeploy](https://github.com/liu-mengyang/rust-mmdeploy) directly if you want to use MMDeploy in Rust. If it cannot cover your requirements, it's easy to build a high-level abstraction on top of this repo.
 
 ## Support matrix from MMDeploy
 
@@ -35,36 +35,30 @@ In order to successfully build this repo, you are supposed to install some pre-p
 apt install llvm-dev libclang-dev clang
 ```
 
-**Step 2.1. (For ONNXRuntime)** Download and install pre-built mmdeploy package and ONNXRuntime.
+**Step 2 (Pre-built package).** Download and install the pre-built mmdeploy package, which bundles ONNXRuntime and TensorRT.
 
 ```bash
-wget https://github.com/open-mmlab/mmdeploy/releases/download/v0.9.0/mmdeploy-0.9.0-linux-x86_64-onnxruntime1.8.1.tar.gz
-tar -zxvf mmdeploy-0.9.0-linux-x86_64-onnxruntime1.8.1.tar.gz
-cd mmdeploy-0.9.0-linux-x86_64-onnxruntime1.8.1
-export MMDEPLOY_DIR=$(pwd)/sdk
-
-wget https://github.com/microsoft/onnxruntime/releases/download/v1.8.1/onnxruntime-linux-x64-1.8.1.tgz
-tar -zxvf onnxruntime-linux-x64-1.8.1.tgz
-cd onnxruntime-linux-x64-1.8.1
-export ONNXRUNTIME_DIR=$(pwd)
+wget https://github.com/open-mmlab/mmdeploy/releases/download/v1.1.0/mmdeploy-1.1.0-linux-x86_64-cuda11.3.tar.gz
+tar -zxvf mmdeploy-1.1.0-linux-x86_64-cuda11.3.tar.gz
+cd mmdeploy-1.1.0-linux-x86_64-cuda11.3
+export MMDEPLOY_DIR=$(pwd)
+export ONNXRUNTIME_DIR=$(pwd)/thirdparty/onnxruntime
 export LD_LIBRARY_PATH=$ONNXRUNTIME_DIR/lib:$LD_LIBRARY_PATH
+export TENSORRT_DIR=$(pwd)/thirdparty/tensorrt
+export LD_LIBRARY_PATH=$TENSORRT_DIR/lib:$LD_LIBRARY_PATH
 ```
-
-**Step 2.2. (For TensorRT)** Download and install pre-built mmdeploy package and TensorRT.
-
+Then follow the guide in $MMDEPLOY_DIR/README.md to build the SDK.
+**Step 2 (Build from source).** Follow the [official guide](https://mmdeploy.readthedocs.io/en/latest/01-how-to-build/build_from_source.html) to build the MMDeploy SDK from source. Once the build succeeds, you should have the TENSORRT_DIR, ONNXRUNTIME_DIR and LD_LIBRARY_PATH environment variables set. Then
 ```bash
-wget https://github.com/open-mmlab/mmdeploy/releases/download/v0.9.0/mmdeploy-0.9.0-linux-x86_64-cuda11.1-tensorrt8.2.3.0.tar.gz
-tar -zxvf mmdeploy-0.9.0-linux-x86_64-cuda11.1-tensorrt8.2.3.0.tar.gz
-cd mmdeploy-0.9.0-linux-x86_64-cuda11.1-tensorrt8.2.3.0
-export MMDEPLOY_DIR=$(pwd)/sdk
+export MMDEPLOY_DIR=/path/to/mmdeploy/build/install
+export LD_LIBRARY_PATH=$MMDEPLOY_DIR/lib:$LD_LIBRARY_PATH
 ```
-
 ## Quick start
 
 Update your *Cargo.toml*
 
 ```toml
-mmdeploy-sys = "1.0.0"
+mmdeploy-sys = "1.1.0"
 ```
 
 ## Build
diff --git a/build.rs b/build.rs
index 5c051a3..586d20c 100644
--- a/build.rs
+++ b/build.rs
@@ -64,7 +64,7 @@ fn main() {
             mmdeploy_dir.join("lib").to_string_lossy()
         );
         link_to_libraries();
-        vec![mmdeploy_dir.join("include").join("mmdeploy")]
+        vec![mmdeploy_dir.join("include")]
     } else {
         // TODO: Support pkg-config
         println!("Fallback to pkg-config");
@@ -81,18 +81,18 @@ fn main() {
         .clang_args(clang_includes);
 
     builder = builder
-        .header(search_include(&include_path, "common.h"))
-        .header(search_include(&include_path, "model.h"))
-        .header(search_include(&include_path, "executor.h"))
-        .header(search_include(&include_path, "pipeline.h"))
-        .header(search_include(&include_path, "classifier.h"))
-        .header(search_include(&include_path, "detector.h"))
-        .header(search_include(&include_path, "segmentor.h"))
-        .header(search_include(&include_path, "pose_detector.h"))
-        .header(search_include(&include_path, "rotated_detector.h"))
-        .header(search_include(&include_path, "text_recognizer.h"))
-        .header(search_include(&include_path, "text_detector.h"))
-        .header(search_include(&include_path, "restorer.h"));
+        .header(search_include(&include_path, "mmdeploy/common.h"))
+        .header(search_include(&include_path, "mmdeploy/model.h"))
+        .header(search_include(&include_path, "mmdeploy/executor.h"))
+        .header(search_include(&include_path, "mmdeploy/pipeline.h"))
+        .header(search_include(&include_path, "mmdeploy/classifier.h"))
+        .header(search_include(&include_path, "mmdeploy/detector.h"))
+        .header(search_include(&include_path, "mmdeploy/segmentor.h"))
+        .header(search_include(&include_path, "mmdeploy/pose_detector.h"))
+        .header(search_include(&include_path, "mmdeploy/rotated_detector.h"))
+        .header(search_include(&include_path, "mmdeploy/text_recognizer.h"))
+        .header(search_include(&include_path, "mmdeploy/text_detector.h"))
+        .header(search_include(&include_path, "mmdeploy/restorer.h"));
 
     // generate builder
     let bindings = builder
diff --git a/src/lib.rs b/src/lib.rs
index 7e4ddb3..4e814ea 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -2,8 +2,6 @@ use std::os::raw::c_char;
 use std::ffi::CString;
 
 include!(concat!(env!("OUT_DIR"), "/bindings.rs"));
-
-
 // classifier
 impl mmdeploy_classifier {
     pub fn new() -> mmdeploy_classifier {
@@ -71,6 +69,7 @@ impl mmdeploy_segmentation_t {
             width: 0,
             classes: 0,
             mask: Box::into_raw(Box::new(0)),
+            score: Box::into_raw(Box::new(0.0)),
         }
     }
 }
@@ -176,6 +175,7 @@ impl mmdeploy_mat_t {
             channel: 0,
             format: 0,
             type_: 0,
+            device: std::ptr::null_mut(),
         }
     }
 }
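
As a quick illustration (not part of the patch) of the constructors touched in the src/lib.rs hunks above, a downstream crate might exercise the newly initialized fields like this. The `score` and `device` field names come straight from the diff; the surrounding program, and the assumption that `mmdeploy_mat_t` exposes the same kind of `new()` constructor as the other structs, are only a sketch:

```rust
// Illustrative sketch only: exercises the zero-initializing constructors shown
// in the src/lib.rs hunks above. Assumes mmdeploy-sys is built against an
// installed MMDeploy SDK (MMDEPLOY_DIR etc. set as described in the README).
fn main() {
    // Segmentation results now also carry a `score` pointer, initialized to a
    // heap-allocated 0.0 alongside the existing `mask` pointer.
    let seg = mmdeploy_sys::mmdeploy_segmentation_t::new();
    assert!(!seg.mask.is_null());
    assert!(!seg.score.is_null());

    // Mat descriptors now carry a `device` handle, defaulted to a null pointer.
    let mat = mmdeploy_sys::mmdeploy_mat_t::new();
    assert!(mat.device.is_null());
}
```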