From bce6b94d97f80cefc023aff9179cee6123b5da28 Mon Sep 17 00:00:00 2001 From: dengqian <18987653966@163.com> Date: Thu, 16 Sep 2021 11:15:45 +0800 Subject: [PATCH 1/3] infer and modelarts --- .../cv/inceptionv4/infer/convert/aipp.cfg | 14 + .../inceptionv4/infer/convert/convert_om.sh | 28 ++ .../inceptionv4/infer/mxbase/CMakeLists.txt | 56 +++ official/cv/inceptionv4/infer/mxbase/build.sh | 47 +++ .../infer/mxbase/src/InceptionV4.cpp | 217 ++++++++++++ .../infer/mxbase/src/InceptionV4.h | 68 ++++ .../cv/inceptionv4/infer/mxbase/src/main.cpp | 83 +++++ .../sdk/models/inceptionv4/inceptionv4.cfg | 3 + .../infer/sdk/pipeline/InceptionV4.pipeline | 64 ++++ .../infer/sdk/python_inceptionv4/main.py | 95 ++++++ .../infer/sdk/python_inceptionv4/run.sh | 36 ++ .../cv/inceptionv4/modelarts/train_start.py | 322 ++++++++++++++++++ 12 files changed, 1033 insertions(+) create mode 100644 official/cv/inceptionv4/infer/convert/aipp.cfg create mode 100644 official/cv/inceptionv4/infer/convert/convert_om.sh create mode 100644 official/cv/inceptionv4/infer/mxbase/CMakeLists.txt create mode 100644 official/cv/inceptionv4/infer/mxbase/build.sh create mode 100644 official/cv/inceptionv4/infer/mxbase/src/InceptionV4.cpp create mode 100644 official/cv/inceptionv4/infer/mxbase/src/InceptionV4.h create mode 100644 official/cv/inceptionv4/infer/mxbase/src/main.cpp create mode 100644 official/cv/inceptionv4/infer/sdk/models/inceptionv4/inceptionv4.cfg create mode 100644 official/cv/inceptionv4/infer/sdk/pipeline/InceptionV4.pipeline create mode 100644 official/cv/inceptionv4/infer/sdk/python_inceptionv4/main.py create mode 100644 official/cv/inceptionv4/infer/sdk/python_inceptionv4/run.sh create mode 100644 official/cv/inceptionv4/modelarts/train_start.py diff --git a/official/cv/inceptionv4/infer/convert/aipp.cfg b/official/cv/inceptionv4/infer/convert/aipp.cfg new file mode 100644 index 000000000..405144743 --- /dev/null +++ b/official/cv/inceptionv4/infer/convert/aipp.cfg @@ -0,0 +1,14 @@ 
+aipp_op { + aipp_mode: static + input_format : RGB888_U8 + rbuv_swap_switch : true + mean_chn_0 : 0 + mean_chn_1 : 0 + mean_chn_2 : 0 + min_chn_0 : 125.307 + min_chn_1 : 122.961 + min_chn_2 : 113.8575 + var_reci_chn_0 : 0.0171247538316637 + var_reci_chn_1 : 0.0175070028011204 + var_reci_chn_2 : 0.0174291938997821 +} \ No newline at end of file diff --git a/official/cv/inceptionv4/infer/convert/convert_om.sh b/official/cv/inceptionv4/infer/convert/convert_om.sh new file mode 100644 index 000000000..63eacdea3 --- /dev/null +++ b/official/cv/inceptionv4/infer/convert/convert_om.sh @@ -0,0 +1,28 @@ +#!/bin/bash + +if [ $# -ne 3 ] +then + echo "Wrong parameter format." + echo "Usage:" + echo " bash $0 [INPUT_AIR_PATH] [AIPP_PATH] [OUTPUT_OM_PATH_NAME]" + echo "Example: " + echo " bash convert_om.sh xxx.air ./aipp.cfg xx" + + exit 1 +fi + +input_air_path=$1 +aipp_cfg_file=$2 +output_om_path=$3 + + +echo "Input AIR file path: ${input_air_path}" +echo "Output OM file path: ${output_om_path}" + +atc --input_format=NCHW \ + --framework=1 \ + --model="${input_air_path}" \ + --input_shape="x:1, 3, 299, 299" \ + --output="${output_om_path}" \ + --insert_op_conf="${aipp_cfg_file}" \ + --soc_version=Ascend310 \ No newline at end of file diff --git a/official/cv/inceptionv4/infer/mxbase/CMakeLists.txt b/official/cv/inceptionv4/infer/mxbase/CMakeLists.txt new file mode 100644 index 000000000..f439835b2 --- /dev/null +++ b/official/cv/inceptionv4/infer/mxbase/CMakeLists.txt @@ -0,0 +1,56 @@ +cmake_minimum_required(VERSION 3.10.0) +project(inceptionv4) + +set(TARGET inceptionv4) + +add_definitions(-DENABLE_DVPP_INTERFACE) +add_definitions(-D_GLIBCXX_USE_CXX11_ABI=0) +add_definitions(-Dgoogle=mindxsdk_private) +add_compile_options(-std=c++11 -fPIE -fstack-protector-all -fPIC -Wall) +add_link_options(-Wl,-z,relro,-z,now,-z,noexecstack -s -pie) + +# Check environment variable +if(NOT DEFINED ENV{ASCEND_HOME}) + message(FATAL_ERROR "please define environment variable:ASCEND_HOME") 
+endif() +if(NOT DEFINED ENV{ASCEND_VERSION}) + message(WARNING "please define environment variable:ASCEND_VERSION") +endif() +if(NOT DEFINED ENV{ARCH_PATTERN}) + message(WARNING "please define environment variable:ARCH_PATTERN") +endif() + +set(ACL_INC_DIR $ENV{ASCEND_HOME}/$ENV{ASCEND_VERSION}/$ENV{ARCH_PATTERN}/acllib/include) +set(ACL_LIB_DIR $ENV{ASCEND_HOME}/$ENV{ASCEND_VERSION}/$ENV{ARCH_PATTERN}/acllib/lib64) + +set(MXBASE_ROOT_DIR $ENV{MX_SDK_HOME}) +set(MXBASE_INC ${MXBASE_ROOT_DIR}/include) +set(MXBASE_LIB_DIR ${MXBASE_ROOT_DIR}/lib) +set(MXBASE_POST_LIB_DIR ${MXBASE_ROOT_DIR}/lib/modelpostprocessors) +set(MXBASE_POST_PROCESS_DIR ${MXBASE_ROOT_DIR}/include/MxBase/postprocess/include) + +if(DEFINED ENV{MXSDK_OPENSOURCE_DIR}) + set(OPENSOURCE_DIR $ENV{MXSDK_OPENSOURCE_DIR}) +else() + set(OPENSOURCE_DIR ${MXBASE_ROOT_DIR}/opensource) +endif() + + +include_directories(${ACL_INC_DIR}) +include_directories(${OPENSOURCE_DIR}/include) +include_directories(${OPENSOURCE_DIR}/include/opencv4) + +include_directories(${MXBASE_INC}) +include_directories(${MXBASE_POST_PROCESS_DIR}) + +link_directories(${ACL_LIB_DIR}) +link_directories(${OPENSOURCE_DIR}/lib) +link_directories(${MXBASE_LIB_DIR}) +link_directories(${MXBASE_POST_LIB_DIR}) + + +add_executable(${TARGET} src/main.cpp src/InceptionV4.cpp) + +target_link_libraries(${TARGET} glog cpprest mxbase resnet50postprocess opencv_world) + +install(TARGETS ${TARGET} RUNTIME DESTINATION ${PROJECT_SOURCE_DIR}/) diff --git a/official/cv/inceptionv4/infer/mxbase/build.sh b/official/cv/inceptionv4/infer/mxbase/build.sh new file mode 100644 index 000000000..76812328b --- /dev/null +++ b/official/cv/inceptionv4/infer/mxbase/build.sh @@ -0,0 +1,47 @@ +#!/bin/bash + +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ + + +# env + +mkdir -p build +cd build || exit + +function make_plugin() { + if ! cmake ..; + then + echo "cmake failed." + return 1 + fi + + if ! (make); + then + echo "make failed." + return 1 + fi + + return 0 +} + +if make_plugin; +then + echo "INFO: Build successfully." +else + echo "ERROR: Build failed." +fi + +cd - || exit diff --git a/official/cv/inceptionv4/infer/mxbase/src/InceptionV4.cpp b/official/cv/inceptionv4/infer/mxbase/src/InceptionV4.cpp new file mode 100644 index 000000000..56e7fd2b5 --- /dev/null +++ b/official/cv/inceptionv4/infer/mxbase/src/InceptionV4.cpp @@ -0,0 +1,217 @@ +/* +* Copyright (c) 2021. Huawei Technologies Co., Ltd. All rights reserved. +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. 
+*/ + +#include "InceptionV4.h" +#include +#include +#include +#include +#include "MxBase/DeviceManager/DeviceManager.h" +#include "MxBase/Log/Log.h" + +using namespace MxBase; +namespace { + const uint32_t YUV_BYTE_NU = 3; + const uint32_t YUV_BYTE_DE = 2; + const uint32_t VPC_H_ALIGN = 2; +} + +APP_ERROR InceptionV4::Init(const InitParam &initParam) { + deviceId_ = initParam.deviceId; + APP_ERROR ret = MxBase::DeviceManager::GetInstance()->InitDevices(); + if (ret != APP_ERR_OK) { + LogError << "Init devices failed, ret=" << ret << "."; + return ret; + } + ret = MxBase::TensorContext::GetInstance()->SetContext(initParam.deviceId); + if (ret != APP_ERR_OK) { + LogError << "Set context failed, ret=" << ret << "."; + return ret; + } + dvppWrapper_ = std::make_shared(); + ret = dvppWrapper_->Init(); + if (ret != APP_ERR_OK) { + LogError << "DvppWrapper init failed, ret=" << ret << "."; + return ret; + } + model_ = std::make_shared(); + ret = model_->Init(initParam.modelPath, modelDesc_); + if (ret != APP_ERR_OK) { + LogError << "ModelInferenceProcessor init failed, ret=" << ret << "."; + return ret; + } + MxBase::ConfigData configData; + const std::string softmax = initParam.softmax ? "true" : "false"; + const std::string checkTensor = initParam.checkTensor ? 
"true" : "false"; + configData.SetJsonValue("CLASS_NUM", std::to_string(initParam.classNum)); + configData.SetJsonValue("TOP_K", std::to_string(initParam.topk)); + configData.SetJsonValue("SOFTMAX", softmax); + configData.SetJsonValue("CHECK_MODEL", checkTensor); + auto jsonStr = configData.GetCfgJson().serialize(); + std::map> config; + config["postProcessConfigContent"] = std::make_shared(jsonStr); + config["labelPath"] = std::make_shared(initParam.labelPath); + post_ = std::make_shared(); + ret = post_->Init(config); + if (ret != APP_ERR_OK) { + LogError << "Resnet50PostProcess init failed, ret=" << ret << "."; + return ret; + } + return APP_ERR_OK; +} + +APP_ERROR InceptionV4::DeInit() { + dvppWrapper_->DeInit(); + model_->DeInit(); + post_->DeInit(); + MxBase::DeviceManager::GetInstance()->DestroyDevices(); + return APP_ERR_OK; +} + +APP_ERROR InceptionV4::ReadImage(const std::string &imgPath, cv::Mat *imageMat) { + *imageMat = cv::imread(imgPath, cv::IMREAD_COLOR); + return APP_ERR_OK; +} + +APP_ERROR InceptionV4::ResizeImage(const cv::Mat &srcImageMat, cv::Mat *dstImageMat) { + static constexpr uint32_t resizeHeight = 299; + static constexpr uint32_t resizeWidth = 299; + cv::resize(srcImageMat, *dstImageMat, cv::Size(resizeWidth, resizeHeight)); + return APP_ERR_OK; +} + +APP_ERROR InceptionV4::CVMatToTensorBase(const cv::Mat &imageMat, + MxBase::TensorBase *tensorBase) { + const uint32_t dataSize = imageMat.cols * imageMat.rows * YUV_BYTE_NU; + LogInfo << "image size after resize" << imageMat.cols << " " << imageMat.rows; + MemoryData memoryDataDst(dataSize, MemoryData::MEMORY_DEVICE, deviceId_); + MemoryData memoryDataSrc(imageMat.data, dataSize, MemoryData::MEMORY_HOST_MALLOC); + APP_ERROR ret = MemoryHelper::MxbsMallocAndCopy(memoryDataDst, memoryDataSrc); + if (ret != APP_ERR_OK) { + LogError << GetError(ret) << "Memory malloc failed."; + return ret; + } + std::vector shape = { imageMat.rows * YUV444_RGB_WIDTH_NU, static_cast(imageMat.cols) }; + 
*tensorBase = TensorBase(memoryDataDst, false, shape, TENSOR_DTYPE_UINT8); + return APP_ERR_OK; +} + +APP_ERROR InceptionV4::Inference(const std::vector &inputs, + std::vector *outputs) { + auto dtypes = model_->GetOutputDataType(); + for (size_t i = 0; i < modelDesc_.outputTensors.size(); ++i) { + std::vector shape = {}; + for (size_t j = 0; j < modelDesc_.outputTensors[i].tensorDims.size(); ++j) { + shape.push_back((uint32_t)modelDesc_.outputTensors[i].tensorDims[j]); + } + TensorBase tensor(shape, dtypes[i], MemoryData::MemoryType::MEMORY_DEVICE, deviceId_); + APP_ERROR ret = TensorBase::TensorBaseMalloc(tensor); + if (ret != APP_ERR_OK) { + LogError << "TensorBaseMalloc failed, ret=" << ret << "."; + return ret; + } + outputs->push_back(tensor); + } + DynamicInfo dynamicInfo = {}; + dynamicInfo.dynamicType = DynamicType::STATIC_BATCH; + auto startTime = std::chrono::high_resolution_clock::now(); + APP_ERROR ret = model_->ModelInference(inputs, *outputs, dynamicInfo); + auto endTime = std::chrono::high_resolution_clock::now(); + double costMs = std::chrono::duration(endTime - startTime).count(); + inferCostTimeMilliSec += costMs; + if (ret != APP_ERR_OK) { + LogError << "ModelInference failed, ret=" << ret << "."; + return ret; + } + return APP_ERR_OK; +} + +APP_ERROR InceptionV4::PostProcess(const std::vector &inputs, + std::vector> *clsInfos) { + APP_ERROR ret = post_->Process(inputs, *clsInfos); + if (ret != APP_ERR_OK) { + LogError << "Process failed, ret=" << ret << "."; + return ret; + } + return APP_ERR_OK; +} + +APP_ERROR InceptionV4::SaveResult(const std::string &imgPath, + std::vector> *batchClsInfos) { + LogInfo << "image path" << imgPath; + std::string fileName = imgPath.substr(imgPath.find_last_of("/") + 1); + size_t dot = fileName.find_last_of("."); + std::string resFileName = "infer_results/" + fileName.substr(0, dot) + "_1.txt"; + LogInfo << "file path for saving result" << resFileName; + std::ofstream outfile(resFileName); + if (outfile.fail()) 
{ + LogError << "Failed to open result file: "; + return APP_ERR_COMM_FAILURE; + } + uint32_t batchIndex = 0; + for (auto clsInfos : *batchClsInfos) { + std::string resultStr; + for (auto clsInfo : clsInfos) { + LogDebug << " className:" << clsInfo.className << " confidence:" << clsInfo.confidence << + " classIndex:" << clsInfo.classId; + resultStr += std::to_string(clsInfo.classId) + " "; + } + outfile << resultStr << std::endl; + batchIndex++; + } + outfile.close(); + return APP_ERR_OK; +} + +APP_ERROR InceptionV4::Process(const std::string &imgPath) { + cv::Mat imageMat; + APP_ERROR ret = ReadImage(imgPath, &imageMat); + if (ret != APP_ERR_OK) { + LogError << "ReadImage failed, ret=" << ret << "."; + return ret; + } + ResizeImage(imageMat, &imageMat); + std::vector inputs = {}; + std::vector outputs = {}; + TensorBase tensorBase; + ret = CVMatToTensorBase(imageMat, &tensorBase); + if (ret != APP_ERR_OK) { + LogError << "CVMatToTensorBase failed, ret=" << ret << "."; + return ret; + } + inputs.push_back(tensorBase); + auto startTime = std::chrono::high_resolution_clock::now(); + ret = Inference(inputs, &outputs); + auto endTime = std::chrono::high_resolution_clock::now(); + double costMs = std::chrono::duration(endTime - startTime).count(); + inferCostTimeMilliSec += costMs; + if (ret != APP_ERR_OK) { + LogError << "Inference failed, ret=" << ret << "."; + return ret; + } + std::vector> BatchClsInfos = {}; + ret = PostProcess(outputs, &BatchClsInfos); + if (ret != APP_ERR_OK) { + LogError << "PostProcess failed, ret=" << ret << "."; + return ret; + } + ret = SaveResult(imgPath, &BatchClsInfos); + if (ret != APP_ERR_OK) { + LogError << "Save infer results into file failed. 
ret = " << ret << "."; + return ret; + } + return APP_ERR_OK; +} diff --git a/official/cv/inceptionv4/infer/mxbase/src/InceptionV4.h b/official/cv/inceptionv4/infer/mxbase/src/InceptionV4.h new file mode 100644 index 000000000..3655af688 --- /dev/null +++ b/official/cv/inceptionv4/infer/mxbase/src/InceptionV4.h @@ -0,0 +1,68 @@ +/* + * Copyright (c) 2021. Huawei Technologies Co., Ltd. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef MXBASE_INCEPTIONV4_H +#define MXBASE_INCEPTIONV4_H + +#include +#include +#include +#include +#include "MxBase/DvppWrapper/DvppWrapper.h" +#include "MxBase/ModelInfer/ModelInferenceProcessor.h" +#include "MxBase/Tensor/TensorContext/TensorContext.h" +#include "ClassPostProcessors/Resnet50PostProcess.h" + +struct InitParam { + uint32_t deviceId; + std::string labelPath; + uint32_t classNum; + uint32_t topk; + bool softmax; + bool checkTensor; + std::string modelPath; +}; + +class InceptionV4 { + public: + APP_ERROR Init(const InitParam &initParam); + APP_ERROR DeInit(); + APP_ERROR ReadImage(const std::string &imgPath, cv::Mat *imageMat); + APP_ERROR ResizeImage(const cv::Mat &srcImageMat, cv::Mat *dstImageMat); + APP_ERROR CVMatToTensorBase(const cv::Mat &imageMat, MxBase::TensorBase *tensorBase); + APP_ERROR Inference(const std::vector &inputs, + std::vector *outputs); + APP_ERROR PostProcess(const std::vector &inputs, + std::vector> *clsInfos); + APP_ERROR Process(const std::string 
&imgPath); + // get infer time + double GetInferCostMilliSec() const {return inferCostTimeMilliSec;} + + private: + APP_ERROR SaveResult(const std::string &imgPath, + std::vector> *batchClsInfos); + + private: + std::shared_ptr dvppWrapper_; + std::shared_ptr model_; + std::shared_ptr post_; + MxBase::ModelDesc modelDesc_; + uint32_t deviceId_ = 0; + // infer time + double inferCostTimeMilliSec = 0.0; +}; + +#endif diff --git a/official/cv/inceptionv4/infer/mxbase/src/main.cpp b/official/cv/inceptionv4/infer/mxbase/src/main.cpp new file mode 100644 index 000000000..f03638cd0 --- /dev/null +++ b/official/cv/inceptionv4/infer/mxbase/src/main.cpp @@ -0,0 +1,83 @@ +/* + * Copyright (c) 2021. Huawei Technologies Co., Ltd. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include "InceptionV4.h" +#include "MxBase/Log/Log.h" + +namespace { + const uint32_t CLASS_NUM = 1000; +} // namespace + +APP_ERROR ScanImages(const std::string &path, std::vector *imgFiles) { + DIR *dirPtr = opendir(path.c_str()); + if (dirPtr == nullptr) { + LogError << "opendir failed. dir:" << path; + return APP_ERR_INTERNAL_ERROR; + } + dirent *direntPtr = nullptr; + while ((direntPtr = readdir(dirPtr)) != nullptr) { + std::string fileName = direntPtr->d_name; + if (fileName == "." 
|| fileName == "..") { + continue; + } + + imgFiles->push_back(path + "/" + fileName); + } + closedir(dirPtr); + return APP_ERR_OK; +} + +int main(int argc, char* argv[]) { + if (argc <= 1) { + LogWarn << "Please input image path, such as './inceptionv4 image_dir'."; + return APP_ERR_OK; + } + + InitParam initParam = {}; + initParam.deviceId = 0; + initParam.classNum = CLASS_NUM; + initParam.labelPath = "../data/config/imagenet1000_clsidx_to_labels.names"; + initParam.topk = 5; + initParam.softmax = false; + initParam.checkTensor = true; + initParam.modelPath = "../data/models/inceptionv4_dvpp.om"; + auto inferInceptionv4 = std::make_shared(); + APP_ERROR ret = inferInceptionv4->Init(initParam); + if (ret != APP_ERR_OK) { + LogError << "InceptionV4Classify init failed, ret=" << ret << "."; + return ret; + } + + std::string imgPath = argv[1]; + std::vector imgFilePaths; + ret = ScanImages(imgPath, &imgFilePaths); + if (ret != APP_ERR_OK) { + return ret; + } + for (auto &imgFile : imgFilePaths) { + ret = inferInceptionv4->Process(imgFile); + if (ret != APP_ERR_OK) { + LogError << "InceptionV4Classify process failed, ret=" << ret << "."; + inferInceptionv4->DeInit(); + return ret; + } + } + inferInceptionv4->DeInit(); + double fps = 1000.0 * imgFilePaths.size() / inferInceptionv4->GetInferCostMilliSec(); + LogInfo << " ms\tfps: " << fps << " imgs/sec"; + return APP_ERR_OK; +} diff --git a/official/cv/inceptionv4/infer/sdk/models/inceptionv4/inceptionv4.cfg b/official/cv/inceptionv4/infer/sdk/models/inceptionv4/inceptionv4.cfg new file mode 100644 index 000000000..581fc76d3 --- /dev/null +++ b/official/cv/inceptionv4/infer/sdk/models/inceptionv4/inceptionv4.cfg @@ -0,0 +1,3 @@ +CLASS_NUM=1000 +SOFTMAX=false +TOP_K=5 diff --git a/official/cv/inceptionv4/infer/sdk/pipeline/InceptionV4.pipeline b/official/cv/inceptionv4/infer/sdk/pipeline/InceptionV4.pipeline new file mode 100644 index 000000000..1f185b5fb --- /dev/null +++ 
b/official/cv/inceptionv4/infer/sdk/pipeline/InceptionV4.pipeline @@ -0,0 +1,64 @@ +{ + "im_inceptionv4": { + "stream_config": { + "deviceId": "0" + }, + "appsrc1": { + "props": { + "blocksize": "409600" + }, + "factory": "appsrc", + "next": "mxpi_imagedecoder0" + }, + "mxpi_imagedecoder0": { + "props": { + "handleMethod": "opencv" + }, + "factory": "mxpi_imagedecoder", + "next": "mxpi_imageresize0" + }, + "mxpi_imageresize0": { + "props": { + "handleMethod": "opencv", + "resizeType": "Resizer_Stretch", + "resizeHeight": "299", + "resizeWidth": "299" + }, + "factory": "mxpi_imageresize", + "next": "mxpi_tensorinfer0" + }, + "mxpi_tensorinfer0": { + "props": { + "dataSource": "mxpi_imageresize0", + "modelPath": "./models/inceptionv4/inceptionv4_dvpp.om", + "waitingTime": "2000", + "outputDeviceId": "-1" + }, + "factory": "mxpi_tensorinfer", + "next": "mxpi_classpostprocessor0" + }, + "mxpi_classpostprocessor0": { + "props": { + "dataSource": "mxpi_tensorinfer0", + "postProcessConfigPath": "./models/inceptionv4/inceptionv4.cfg", + "labelPath": "./models/inceptionv4/imagenet1000_clsidx_to_labels.names", + "postProcessLibPath": "../../../lib/modelpostprocessors/libresnet50postprocess.so" + }, + "factory": "mxpi_classpostprocessor", + "next": "mxpi_dataserialize0" + }, + "mxpi_dataserialize0": { + "props": { + "outputDataKeys": "mxpi_classpostprocessor0" + }, + "factory": "mxpi_dataserialize", + "next": "appsink0" + }, + "appsink0": { + "props": { + "blocksize": "4096000" + }, + "factory": "appsink" + } + } +} diff --git a/official/cv/inceptionv4/infer/sdk/python_inceptionv4/main.py b/official/cv/inceptionv4/infer/sdk/python_inceptionv4/main.py new file mode 100644 index 000000000..e98ff5662 --- /dev/null +++ b/official/cv/inceptionv4/infer/sdk/python_inceptionv4/main.py @@ -0,0 +1,95 @@ +# coding=utf-8 + +""" +Copyright 2020 Huawei Technologies Co., Ltd + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance 
with the License. +You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" + +import datetime +import json +import os +import sys + +from StreamManagerApi import StreamManagerApi +from StreamManagerApi import MxDataInput + +if __name__ == '__main__': + # init stream manager + stream_manager_api = StreamManagerApi() + ret = stream_manager_api.InitManager() + if ret != 0: + print("Failed to init Stream manager, ret=%s" % str(ret)) + exit() + + # create streams by pipeline config file + with open("./pipeline/InceptionV4.pipeline", 'rb') as f: + pipelineStr = f.read() + ret = stream_manager_api.CreateMultipleStreams(pipelineStr) + + if ret != 0: + print("Failed to create Stream, ret=%s" % str(ret)) + exit() + + # Construct the input of the stream + data_input = MxDataInput() + + dir_name = sys.argv[1] + res_dir_name = sys.argv[2] + file_list = os.listdir(dir_name) + if not os.path.exists(res_dir_name): + os.makedirs(res_dir_name) + + for file_name in file_list: + file_path = os.path.join(dir_name, file_name) + if not (file_name.lower().endswith(".jpg") or file_name.lower().endswith(".jpeg")): + continue + + with open(file_path, 'rb') as f: + data_input.data = f.read() + + empty_data = [] + stream_name = b'im_inceptionv4' + in_plugin_id = 0 + unique_id = stream_manager_api.SendData(stream_name, in_plugin_id, data_input) + if unique_id < 0: + print("Failed to send data to stream.") + exit() + # Obtain the inference result by specifying streamName and uniqueId. 
+ start_time = datetime.datetime.now() + infer_result = stream_manager_api.GetResult(stream_name, unique_id) + end_time = datetime.datetime.now() + print('sdk run time: {}'.format((end_time - start_time).microseconds)) + if infer_result.errorCode != 0: + print("GetResultWithUniqueId error. errorCode=%d, errorMsg=%s" % ( + infer_result.errorCode, infer_result.data.decode())) + exit() + # print the infer result + infer_res = infer_result.data.decode() + print("process img: {}, infer result: {}".format(file_name, infer_res)) + + load_dict = json.loads(infer_result.data.decode()) + if load_dict.get('MxpiClass') is None: + dot = file_name.find_last_of(".") + with open(res_dir_name + "/" + file_name.substr(0, dot) + '.txt', 'w') as f_write: + f_write.write("") + continue + res_vec = load_dict.get('MxpiClass') + + with open(res_dir_name + "/" + file_name[:-5] + '_1.txt', 'w') as f_write: + res_list = [str(item.get("classId")) + " " for item in res_vec] + f_write.writelines(res_list) + f_write.write('\n') + + # destroy streams + stream_manager_api.DestroyAllStreams() diff --git a/official/cv/inceptionv4/infer/sdk/python_inceptionv4/run.sh b/official/cv/inceptionv4/infer/sdk/python_inceptionv4/run.sh new file mode 100644 index 000000000..1148e0473 --- /dev/null +++ b/official/cv/inceptionv4/infer/sdk/python_inceptionv4/run.sh @@ -0,0 +1,36 @@ +#!/bin/bash + +# Copyright 2020 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+image_path=$1 +result_dir=$2 + +set -e + +CUR_PATH=$(cd "$(dirname "$0")" || { warn "Failed to check path/to/run.sh" ; exit ; } ; pwd) + +# Simple log helper functions +info() { echo -e "\033[1;34m[INFO ][MxStream] $1\033[1;37m" ; } +warn() { echo >&2 -e "\033[1;31m[WARN ][MxStream] $1\033[1;37m" ; } + +export MX_SDK_HOME=${CUR_PATH}/../../.. +export LD_LIBRARY_PATH=${MX_SDK_HOME}/lib:${MX_SDK_HOME}/opensource/lib:${MX_SDK_HOME}/opensource/lib64:/usr/local/Ascend/ascend-toolkit/latest/acllib/lib64:${LD_LIBRARY_PATH} +export GST_PLUGIN_SCANNER=${MX_SDK_HOME}/opensource/libexec/gstreamer-1.0/gst-plugin-scanner +export GST_PLUGIN_PATH=${MX_SDK_HOME}/opensource/lib/gstreamer-1.0:${MX_SDK_HOME}/lib/plugins + +#to set PYTHONPATH, import the StreamManagerApi.py +export PYTHONPATH=$PYTHONPATH:${MX_SDK_HOME}/python + +python3.7 main.py $image_path $result_dir +exit 0 \ No newline at end of file diff --git a/official/cv/inceptionv4/modelarts/train_start.py b/official/cv/inceptionv4/modelarts/train_start.py new file mode 100644 index 000000000..ad69ab83e --- /dev/null +++ b/official/cv/inceptionv4/modelarts/train_start.py @@ -0,0 +1,322 @@ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================ +"""train imagenet""" +import time +import math +import os +import argparse +import glob +import moxing as mox +import numpy as np + +from mindspore import Model +from mindspore import Tensor +from mindspore import context +from mindspore.common import set_seed +from mindspore.common.initializer import XavierUniform, initializer +from mindspore.communication import init, get_rank, get_group_size +from mindspore.nn import RMSProp +from mindspore.nn.loss import SoftmaxCrossEntropyWithLogits +from mindspore.train.callback import ModelCheckpoint, CheckpointConfig, TimeMonitor, LossMonitor +from mindspore.train.loss_scale_manager import FixedLossScaleManager +from mindspore.train.model import ParallelMode +from mindspore.train.serialization import load_checkpoint, load_param_into_net +from mindspore import export + +from src.model_utils.config import config +from src.model_utils.device_adapter import get_device_id, get_device_num +from src.dataset import create_dataset_imagenet, create_dataset_cifar10 +from src.inceptionv4 import Inceptionv4 + + +os.environ['PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION'] = 'python' + +DS_DICT = { + "imagenet": create_dataset_imagenet, + "cifar10": create_dataset_cifar10, +} + +config.device_id = get_device_id() +config.device_num = get_device_num() +device_num = config.device_num +create_dataset = DS_DICT[config.ds_type] + +parser = argparse.ArgumentParser(description='Image classification') +parser.add_argument("--filter_weight", type=str, default=True, + help="Filter head weight parameters, default is False.") +parser.add_argument('--data_url', + metavar='DIR', + default='/cache/data_url', + help='path to dataset') +parser.add_argument('--train_url', + default="/cache/output/", + type=str, + help="setting dir of training output") +parser.add_argument('--checkpoint_url', + default="/cache/ckpt/", + type=str, + help="path to checkpoint file") 
+parser.add_argument('--resume', + default="", + type=str, + help="resume training with existed checkpoint") +parser.add_argument('--ds_type', + default="imagenet", + type=str, + help="dataset type, imagenet or cifar10") +parser.add_argument('--num_classes', + default="1000", + type=str, + help="classes") + +args_opt = parser.parse_args() + +set_seed(1) + + +def generate_cosine_lr(steps_per_epoch, total_epochs, + lr_init=config.lr_init, + lr_end=config.lr_end, + lr_max=config.lr_max, + warmup_epochs=config.warmup_epochs): + """ + Applies cosine decay to generate learning rate array. + + Args: + steps_per_epoch(int): steps number per epoch + total_epochs(int): all epoch in training. + lr_init(float): init learning rate. + lr_end(float): end learning rate + lr_max(float): max learning rate. + warmup_steps(int): all steps in warmup epochs. + + Returns: + np.array, learning rate array. + """ + total_steps = steps_per_epoch * total_epochs + warmup_steps = steps_per_epoch * warmup_epochs + decay_steps = total_steps - warmup_steps + lr_each_step = [] + for i in range(total_steps): + if i < warmup_steps: + lr_inc = (float(lr_max) - float(lr_init)) / float(warmup_steps) + lr = float(lr_init) + lr_inc * (i + 1) + else: + cosine_decay = 0.5 * (1 + math.cos(math.pi * (i - warmup_steps) / decay_steps)) + lr = (lr_max - lr_end) * cosine_decay + lr_end + lr_each_step.append(lr) + learning_rate = np.array(lr_each_step).astype(np.float32) + current_step = steps_per_epoch * (config.start_epoch - 1) + learning_rate = learning_rate[current_step:] + return learning_rate + + +def modelarts_pre_process(): + def unzip(zip_file, save_dir): + import zipfile + s_time = time.time() + if not os.path.exists(os.path.join(save_dir, config.modelarts_dataset_unzip_name)): + zip_isexist = zipfile.is_zipfile(zip_file) + if zip_isexist: + fz = zipfile.ZipFile(zip_file, 'r') + data_num = len(fz.namelist()) + print('Extract Start...') + print('unzip file num: {}'.format(data_num)) + data_print = 
int(data_num / 100) if data_num > 100 else 1 + i = 0 + for file in fz.namelist(): + if i % data_print == 0: + print('unzip percent: {}%'.format(int(i * 100 / data_num))) + i += 1 + fz.extract(file, save_dir) + print('cost time: {}min:{}s.'.format(int((time.time() - s_time) / 60),\ + int(int(time.time() - s_time) % 60))) + print('Extract Done') + else: + print('This is not zip.') + else: + print('Zip has been extracted.') + + if config.need_modelarts_dataset_unzip: + zip_file_1 = os.path.join(config.data_path, config.modelarts_dataset_unzip_name + '.zip') + save_dir_1 = os.path.join(config.data_path) + + sync_lock = '/tmp/unzip_sync.lock' + + # Each server contains 8 devices as most + if get_device_id() % min(get_device_num(), 8) == 0 and not os.path.exists(sync_lock): + print('Zip file path: ', zip_file_1) + print('Unzip file save dir: ', save_dir_1) + unzip(zip_file_1, save_dir_1) + print('===Finish extract data synchronization===') + try: + os.mknod(sync_lock) + except IOError: + pass + + while True: + if os.path.exists(sync_lock): + break + time.sleep(1) + + print('Device: {}, Finish sync unzip data from {} to {}.'.format(get_device_id(), zip_file_1, save_dir_1)) + print('#' * 200, os.listdir(save_dir_1)) + print('#' * 200, os.listdir(os.path.join(config.data_path, config.modelarts_dataset_unzip_name))) + + config.dataset_path = os.path.join(config.data_path, config.modelarts_dataset_unzip_name) + + +def filter_checkpoint_parameter_by_list(origin_dict, param_filter): + """remove useless parameters according to filter_list""" + for key in list(origin_dict.keys()): + for name in param_filter: + if name in key: + print("Delete parameter from checkpoint: ", key) + del origin_dict[key] + break + + +def frozen_to_air(network, args): + param_dict_t = load_checkpoint(args.get("ckpt_file")) + load_param_into_net(network, param_dict_t) + + input_arr = Tensor(np.zeros([args.get("batch_size"), 3, args.get("height"), args.get("width")], np.float32)) + export(network, 
input_arr, file_name=args.get("file_name"), file_format=args.get("file_format")) + + +if __name__ == '__main__': + if not os.path.exists(config.data_path): + os.makedirs(config.data_path, exist_ok=True) + if not os.path.exists(config.load_path): + os.makedirs(config.load_path, exist_ok=True) + if not os.path.exists(config.output_path): + os.makedirs(config.output_path, exist_ok=True) + mox.file.copy_parallel(args_opt.data_url, config.data_path) + mox.file.copy_parallel(args_opt.checkpoint_url, config.load_path) + + config.resume = args_opt.resume + config.ds_type = args_opt.ds_type + config.class_num = args_opt.num_classes + + if args_opt.ds_type == "imagenet": + config.ds_type = "imagenet" + modelarts_pre_process() + + print('epoch_size: {} batch_size: {} class_num {}'.format(config.epoch_size, config.batch_size, config.num_classes)) + + context.set_context(mode=context.GRAPH_MODE, device_target=config.platform) + if config.platform == "Ascend": + context.set_context(device_id=get_device_id()) + context.set_context(enable_graph_kernel=False) + + if device_num > 1: + if config.platform == "Ascend": + init(backend_name='hccl') + elif config.platform == "GPU": + init() + else: + raise ValueError("Unsupported device target.") + + config.rank = get_rank() + config.group_size = get_group_size() + context.set_auto_parallel_context(device_num=device_num, + parallel_mode=ParallelMode.DATA_PARALLEL, + gradients_mean=True, + all_reduce_fusion_config=[200, 400]) + else: + config.rank = 0 + config.group_size = 1 + + # create dataset + train_dataset = create_dataset(dataset_path=config.dataset_path, do_train=True, cfg=config) + train_step_size = train_dataset.get_dataset_size() + print("print(train_step_size):", train_step_size) + # create model + net = Inceptionv4(classes=config.num_classes) + # loss + loss = SoftmaxCrossEntropyWithLogits(sparse=True, reduction="mean") + # learning rate + lr_t = Tensor(generate_cosine_lr(steps_per_epoch=train_step_size, 
total_epochs=config.epoch_size)) + + decayed_params = [] + no_decayed_params = [] + for param in net.trainable_params(): + if 'beta' not in param.name and 'gamma' not in param.name and 'bias' not in param.name: + decayed_params.append(param) + else: + no_decayed_params.append(param) + for param in net.trainable_params(): + if 'beta' not in param.name and 'gamma' not in param.name and 'bias' not in param.name: + param.set_data(initializer(XavierUniform(), param.data.shape, param.data.dtype)) + group_params = [{'params': decayed_params, 'weight_decay': config.weight_decay}, + {'params': no_decayed_params}, + {'order_params': net.trainable_params()}] + + opt = RMSProp(group_params, lr_t, decay=config.decay, epsilon=config.epsilon, weight_decay=config.weight_decay, + momentum=config.momentum, loss_scale=config.loss_scale) + + if get_device_id() == 0: + print(lr_t) + print(train_step_size) + + if config.resume: + ckpt = load_checkpoint(config.resume) + param_dict = load_param_into_net(net, ckpt) + if args_opt.filter_weight: + filter_list = [x.name for x in net.end_point.get_parameters()] + filter_checkpoint_parameter_by_list(param_dict, filter_list) + load_param_into_net(net, param_dict) + + loss_scale_manager = FixedLossScaleManager(config.loss_scale, drop_overflow_update=False) + + model = Model(net, loss_fn=loss, optimizer=opt, metrics={'acc', 'top_1_accuracy', 'top_5_accuracy'}, + loss_scale_manager=loss_scale_manager, amp_level=config.amp_level) + + # define callbacks + performance_cb = TimeMonitor(data_size=train_step_size) + loss_cb = LossMonitor(per_print_times=train_step_size) + ckp_save_step = config.save_checkpoint_epochs * train_step_size + config_ck = CheckpointConfig(save_checkpoint_steps=ckp_save_step, keep_checkpoint_max=config.keep_checkpoint_max) + ckpoint_cb = ModelCheckpoint(prefix=f"inceptionV4-train-rank{config.rank}", + directory=config.output_path, config=config_ck) + callbacks = [performance_cb, loss_cb] + if device_num > 1 and 
config.is_save_on_master and get_device_id() == 0: + callbacks.append(ckpoint_cb) + else: + callbacks.append(ckpoint_cb) + + # train model + model.train(config.epoch_size, train_dataset, callbacks=callbacks, dataset_sink_mode=config.ds_sink_mode) + + ckpt_list = glob.glob(config.output_path + "/inceptionV4*.ckpt") + if not ckpt_list: + print("ckpt file not generated.") + + ckpt_list.sort(key=os.path.getmtime) + ckpt_model = ckpt_list[-1] + print("checkpoint path", ckpt_model) + + net = Inceptionv4(classes=config.num_classes) + + frozen_to_air_args = {'ckpt_file': ckpt_model, + 'batch_size': 1, + 'height': 299, + 'width': 299, + 'file_name': config.output_path + '/inceptionV4', + 'file_format': 'AIR'} + frozen_to_air(net, frozen_to_air_args) + + mox.file.copy_parallel(config.output_path, args_opt.train_url) + print('Inceptionv4 training success!') -- Gitee From 687ddb3b2bc035acb63d5573b5bdfd262a84277d Mon Sep 17 00:00:00 2001 From: dengqian06 <18987653966@163.com> Date: Sat, 25 Sep 2021 06:29:59 +0000 Subject: [PATCH 2/3] =?UTF-8?q?=E5=88=A0=E9=99=A4=E6=96=87=E4=BB=B6=20offi?= =?UTF-8?q?cial/cv/inceptionv4/infer?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../cv/inceptionv4/infer/convert/aipp.cfg | 14 -- .../inceptionv4/infer/convert/convert_om.sh | 28 --- .../inceptionv4/infer/mxbase/CMakeLists.txt | 56 ----- official/cv/inceptionv4/infer/mxbase/build.sh | 47 ---- .../infer/mxbase/src/InceptionV4.cpp | 217 ------------------ .../infer/mxbase/src/InceptionV4.h | 68 ------ .../cv/inceptionv4/infer/mxbase/src/main.cpp | 83 ------- .../sdk/models/inceptionv4/inceptionv4.cfg | 3 - .../infer/sdk/pipeline/InceptionV4.pipeline | 64 ------ .../infer/sdk/python_inceptionv4/main.py | 95 -------- .../infer/sdk/python_inceptionv4/run.sh | 36 --- 11 files changed, 711 deletions(-) delete mode 100644 official/cv/inceptionv4/infer/convert/aipp.cfg delete mode 100644 official/cv/inceptionv4/infer/convert/convert_om.sh delete mode 
100644 official/cv/inceptionv4/infer/mxbase/CMakeLists.txt delete mode 100644 official/cv/inceptionv4/infer/mxbase/build.sh delete mode 100644 official/cv/inceptionv4/infer/mxbase/src/InceptionV4.cpp delete mode 100644 official/cv/inceptionv4/infer/mxbase/src/InceptionV4.h delete mode 100644 official/cv/inceptionv4/infer/mxbase/src/main.cpp delete mode 100644 official/cv/inceptionv4/infer/sdk/models/inceptionv4/inceptionv4.cfg delete mode 100644 official/cv/inceptionv4/infer/sdk/pipeline/InceptionV4.pipeline delete mode 100644 official/cv/inceptionv4/infer/sdk/python_inceptionv4/main.py delete mode 100644 official/cv/inceptionv4/infer/sdk/python_inceptionv4/run.sh diff --git a/official/cv/inceptionv4/infer/convert/aipp.cfg b/official/cv/inceptionv4/infer/convert/aipp.cfg deleted file mode 100644 index 405144743..000000000 --- a/official/cv/inceptionv4/infer/convert/aipp.cfg +++ /dev/null @@ -1,14 +0,0 @@ -aipp_op { - aipp_mode: static - input_format : RGB888_U8 - rbuv_swap_switch : true - mean_chn_0 : 0 - mean_chn_1 : 0 - mean_chn_2 : 0 - min_chn_0 : 125.307 - min_chn_1 : 122.961 - min_chn_2 : 113.8575 - var_reci_chn_0 : 0.0171247538316637 - var_reci_chn_1 : 0.0175070028011204 - var_reci_chn_2 : 0.0174291938997821 -} \ No newline at end of file diff --git a/official/cv/inceptionv4/infer/convert/convert_om.sh b/official/cv/inceptionv4/infer/convert/convert_om.sh deleted file mode 100644 index 63eacdea3..000000000 --- a/official/cv/inceptionv4/infer/convert/convert_om.sh +++ /dev/null @@ -1,28 +0,0 @@ -#!/bin/bash - -if [ $# -ne 3 ] -then - echo "Wrong parameter format." 
- echo "Usage:" - echo " bash $0 [INPUT_AIR_PATH] [AIPP_PATH] [OUTPUT_OM_PATH_NAME]" - echo "Example: " - echo " bash convert_om.sh xxx.air ./aipp.cfg xx" - - exit 1 -fi - -input_air_path=$1 -aipp_cfg_file=$2 -output_om_path=$3 - - -echo "Input AIR file path: ${input_air_path}" -echo "Output OM file path: ${output_om_path}" - -atc --input_format=NCHW \ - --framework=1 \ - --model="${input_air_path}" \ - --input_shape="x:1, 3, 299, 299" \ - --output="${output_om_path}" \ - --insert_op_conf="${aipp_cfg_file}" \ - --soc_version=Ascend310 \ No newline at end of file diff --git a/official/cv/inceptionv4/infer/mxbase/CMakeLists.txt b/official/cv/inceptionv4/infer/mxbase/CMakeLists.txt deleted file mode 100644 index f439835b2..000000000 --- a/official/cv/inceptionv4/infer/mxbase/CMakeLists.txt +++ /dev/null @@ -1,56 +0,0 @@ -cmake_minimum_required(VERSION 3.10.0) -project(inceptionv4) - -set(TARGET inceptionv4) - -add_definitions(-DENABLE_DVPP_INTERFACE) -add_definitions(-D_GLIBCXX_USE_CXX11_ABI=0) -add_definitions(-Dgoogle=mindxsdk_private) -add_compile_options(-std=c++11 -fPIE -fstack-protector-all -fPIC -Wall) -add_link_options(-Wl,-z,relro,-z,now,-z,noexecstack -s -pie) - -# Check environment variable -if(NOT DEFINED ENV{ASCEND_HOME}) - message(FATAL_ERROR "please define environment variable:ASCEND_HOME") -endif() -if(NOT DEFINED ENV{ASCEND_VERSION}) - message(WARNING "please define environment variable:ASCEND_VERSION") -endif() -if(NOT DEFINED ENV{ARCH_PATTERN}) - message(WARNING "please define environment variable:ARCH_PATTERN") -endif() - -set(ACL_INC_DIR $ENV{ASCEND_HOME}/$ENV{ASCEND_VERSION}/$ENV{ARCH_PATTERN}/acllib/include) -set(ACL_LIB_DIR $ENV{ASCEND_HOME}/$ENV{ASCEND_VERSION}/$ENV{ARCH_PATTERN}/acllib/lib64) - -set(MXBASE_ROOT_DIR $ENV{MX_SDK_HOME}) -set(MXBASE_INC ${MXBASE_ROOT_DIR}/include) -set(MXBASE_LIB_DIR ${MXBASE_ROOT_DIR}/lib) -set(MXBASE_POST_LIB_DIR ${MXBASE_ROOT_DIR}/lib/modelpostprocessors) -set(MXBASE_POST_PROCESS_DIR 
${MXBASE_ROOT_DIR}/include/MxBase/postprocess/include) - -if(DEFINED ENV{MXSDK_OPENSOURCE_DIR}) - set(OPENSOURCE_DIR $ENV{MXSDK_OPENSOURCE_DIR}) -else() - set(OPENSOURCE_DIR ${MXBASE_ROOT_DIR}/opensource) -endif() - - -include_directories(${ACL_INC_DIR}) -include_directories(${OPENSOURCE_DIR}/include) -include_directories(${OPENSOURCE_DIR}/include/opencv4) - -include_directories(${MXBASE_INC}) -include_directories(${MXBASE_POST_PROCESS_DIR}) - -link_directories(${ACL_LIB_DIR}) -link_directories(${OPENSOURCE_DIR}/lib) -link_directories(${MXBASE_LIB_DIR}) -link_directories(${MXBASE_POST_LIB_DIR}) - - -add_executable(${TARGET} src/main.cpp src/InceptionV4.cpp) - -target_link_libraries(${TARGET} glog cpprest mxbase resnet50postprocess opencv_world) - -install(TARGETS ${TARGET} RUNTIME DESTINATION ${PROJECT_SOURCE_DIR}/) diff --git a/official/cv/inceptionv4/infer/mxbase/build.sh b/official/cv/inceptionv4/infer/mxbase/build.sh deleted file mode 100644 index 76812328b..000000000 --- a/official/cv/inceptionv4/infer/mxbase/build.sh +++ /dev/null @@ -1,47 +0,0 @@ -#!/bin/bash - -# Copyright 2021 Huawei Technologies Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================ - - -# env - -mkdir -p build -cd build || exit - -function make_plugin() { - if ! cmake ..; - then - echo "cmake failed." - return 1 - fi - - if ! (make); - then - echo "make failed." 
- return 1 - fi - - return 0 -} - -if make_plugin; -then - echo "INFO: Build successfully." -else - echo "ERROR: Build failed." -fi - -cd - || exit diff --git a/official/cv/inceptionv4/infer/mxbase/src/InceptionV4.cpp b/official/cv/inceptionv4/infer/mxbase/src/InceptionV4.cpp deleted file mode 100644 index 56e7fd2b5..000000000 --- a/official/cv/inceptionv4/infer/mxbase/src/InceptionV4.cpp +++ /dev/null @@ -1,217 +0,0 @@ -/* -* Copyright (c) 2021. Huawei Technologies Co., Ltd. All rights reserved. -* -* Licensed under the Apache License, Version 2.0 (the "License"); -* you may not use this file except in compliance with the License. -* You may obtain a copy of the License at -* -* http://www.apache.org/licenses/LICENSE-2.0 -* -* Unless required by applicable law or agreed to in writing, software -* distributed under the License is distributed on an "AS IS" BASIS, -* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -* See the License for the specific language governing permissions and -* limitations under the License. 
-*/ - -#include "InceptionV4.h" -#include -#include -#include -#include -#include "MxBase/DeviceManager/DeviceManager.h" -#include "MxBase/Log/Log.h" - -using namespace MxBase; -namespace { - const uint32_t YUV_BYTE_NU = 3; - const uint32_t YUV_BYTE_DE = 2; - const uint32_t VPC_H_ALIGN = 2; -} - -APP_ERROR InceptionV4::Init(const InitParam &initParam) { - deviceId_ = initParam.deviceId; - APP_ERROR ret = MxBase::DeviceManager::GetInstance()->InitDevices(); - if (ret != APP_ERR_OK) { - LogError << "Init devices failed, ret=" << ret << "."; - return ret; - } - ret = MxBase::TensorContext::GetInstance()->SetContext(initParam.deviceId); - if (ret != APP_ERR_OK) { - LogError << "Set context failed, ret=" << ret << "."; - return ret; - } - dvppWrapper_ = std::make_shared(); - ret = dvppWrapper_->Init(); - if (ret != APP_ERR_OK) { - LogError << "DvppWrapper init failed, ret=" << ret << "."; - return ret; - } - model_ = std::make_shared(); - ret = model_->Init(initParam.modelPath, modelDesc_); - if (ret != APP_ERR_OK) { - LogError << "ModelInferenceProcessor init failed, ret=" << ret << "."; - return ret; - } - MxBase::ConfigData configData; - const std::string softmax = initParam.softmax ? "true" : "false"; - const std::string checkTensor = initParam.checkTensor ? 
"true" : "false"; - configData.SetJsonValue("CLASS_NUM", std::to_string(initParam.classNum)); - configData.SetJsonValue("TOP_K", std::to_string(initParam.topk)); - configData.SetJsonValue("SOFTMAX", softmax); - configData.SetJsonValue("CHECK_MODEL", checkTensor); - auto jsonStr = configData.GetCfgJson().serialize(); - std::map> config; - config["postProcessConfigContent"] = std::make_shared(jsonStr); - config["labelPath"] = std::make_shared(initParam.labelPath); - post_ = std::make_shared(); - ret = post_->Init(config); - if (ret != APP_ERR_OK) { - LogError << "Resnet50PostProcess init failed, ret=" << ret << "."; - return ret; - } - return APP_ERR_OK; -} - -APP_ERROR InceptionV4::DeInit() { - dvppWrapper_->DeInit(); - model_->DeInit(); - post_->DeInit(); - MxBase::DeviceManager::GetInstance()->DestroyDevices(); - return APP_ERR_OK; -} - -APP_ERROR InceptionV4::ReadImage(const std::string &imgPath, cv::Mat *imageMat) { - *imageMat = cv::imread(imgPath, cv::IMREAD_COLOR); - return APP_ERR_OK; -} - -APP_ERROR InceptionV4::ResizeImage(const cv::Mat &srcImageMat, cv::Mat *dstImageMat) { - static constexpr uint32_t resizeHeight = 299; - static constexpr uint32_t resizeWidth = 299; - cv::resize(srcImageMat, *dstImageMat, cv::Size(resizeWidth, resizeHeight)); - return APP_ERR_OK; -} - -APP_ERROR InceptionV4::CVMatToTensorBase(const cv::Mat &imageMat, - MxBase::TensorBase *tensorBase) { - const uint32_t dataSize = imageMat.cols * imageMat.rows * YUV_BYTE_NU; - LogInfo << "image size after resize" << imageMat.cols << " " << imageMat.rows; - MemoryData memoryDataDst(dataSize, MemoryData::MEMORY_DEVICE, deviceId_); - MemoryData memoryDataSrc(imageMat.data, dataSize, MemoryData::MEMORY_HOST_MALLOC); - APP_ERROR ret = MemoryHelper::MxbsMallocAndCopy(memoryDataDst, memoryDataSrc); - if (ret != APP_ERR_OK) { - LogError << GetError(ret) << "Memory malloc failed."; - return ret; - } - std::vector shape = { imageMat.rows * YUV444_RGB_WIDTH_NU, static_cast(imageMat.cols) }; - 
*tensorBase = TensorBase(memoryDataDst, false, shape, TENSOR_DTYPE_UINT8); - return APP_ERR_OK; -} - -APP_ERROR InceptionV4::Inference(const std::vector &inputs, - std::vector *outputs) { - auto dtypes = model_->GetOutputDataType(); - for (size_t i = 0; i < modelDesc_.outputTensors.size(); ++i) { - std::vector shape = {}; - for (size_t j = 0; j < modelDesc_.outputTensors[i].tensorDims.size(); ++j) { - shape.push_back((uint32_t)modelDesc_.outputTensors[i].tensorDims[j]); - } - TensorBase tensor(shape, dtypes[i], MemoryData::MemoryType::MEMORY_DEVICE, deviceId_); - APP_ERROR ret = TensorBase::TensorBaseMalloc(tensor); - if (ret != APP_ERR_OK) { - LogError << "TensorBaseMalloc failed, ret=" << ret << "."; - return ret; - } - outputs->push_back(tensor); - } - DynamicInfo dynamicInfo = {}; - dynamicInfo.dynamicType = DynamicType::STATIC_BATCH; - auto startTime = std::chrono::high_resolution_clock::now(); - APP_ERROR ret = model_->ModelInference(inputs, *outputs, dynamicInfo); - auto endTime = std::chrono::high_resolution_clock::now(); - double costMs = std::chrono::duration(endTime - startTime).count(); - inferCostTimeMilliSec += costMs; - if (ret != APP_ERR_OK) { - LogError << "ModelInference failed, ret=" << ret << "."; - return ret; - } - return APP_ERR_OK; -} - -APP_ERROR InceptionV4::PostProcess(const std::vector &inputs, - std::vector> *clsInfos) { - APP_ERROR ret = post_->Process(inputs, *clsInfos); - if (ret != APP_ERR_OK) { - LogError << "Process failed, ret=" << ret << "."; - return ret; - } - return APP_ERR_OK; -} - -APP_ERROR InceptionV4::SaveResult(const std::string &imgPath, - std::vector> *batchClsInfos) { - LogInfo << "image path" << imgPath; - std::string fileName = imgPath.substr(imgPath.find_last_of("/") + 1); - size_t dot = fileName.find_last_of("."); - std::string resFileName = "infer_results/" + fileName.substr(0, dot) + "_1.txt"; - LogInfo << "file path for saving result" << resFileName; - std::ofstream outfile(resFileName); - if (outfile.fail()) 
{ - LogError << "Failed to open result file: "; - return APP_ERR_COMM_FAILURE; - } - uint32_t batchIndex = 0; - for (auto clsInfos : *batchClsInfos) { - std::string resultStr; - for (auto clsInfo : clsInfos) { - LogDebug << " className:" << clsInfo.className << " confidence:" << clsInfo.confidence << - " classIndex:" << clsInfo.classId; - resultStr += std::to_string(clsInfo.classId) + " "; - } - outfile << resultStr << std::endl; - batchIndex++; - } - outfile.close(); - return APP_ERR_OK; -} - -APP_ERROR InceptionV4::Process(const std::string &imgPath) { - cv::Mat imageMat; - APP_ERROR ret = ReadImage(imgPath, &imageMat); - if (ret != APP_ERR_OK) { - LogError << "ReadImage failed, ret=" << ret << "."; - return ret; - } - ResizeImage(imageMat, &imageMat); - std::vector inputs = {}; - std::vector outputs = {}; - TensorBase tensorBase; - ret = CVMatToTensorBase(imageMat, &tensorBase); - if (ret != APP_ERR_OK) { - LogError << "CVMatToTensorBase failed, ret=" << ret << "."; - return ret; - } - inputs.push_back(tensorBase); - auto startTime = std::chrono::high_resolution_clock::now(); - ret = Inference(inputs, &outputs); - auto endTime = std::chrono::high_resolution_clock::now(); - double costMs = std::chrono::duration(endTime - startTime).count(); - inferCostTimeMilliSec += costMs; - if (ret != APP_ERR_OK) { - LogError << "Inference failed, ret=" << ret << "."; - return ret; - } - std::vector> BatchClsInfos = {}; - ret = PostProcess(outputs, &BatchClsInfos); - if (ret != APP_ERR_OK) { - LogError << "PostProcess failed, ret=" << ret << "."; - return ret; - } - ret = SaveResult(imgPath, &BatchClsInfos); - if (ret != APP_ERR_OK) { - LogError << "Save infer results into file failed. 
ret = " << ret << "."; - return ret; - } - return APP_ERR_OK; -} diff --git a/official/cv/inceptionv4/infer/mxbase/src/InceptionV4.h b/official/cv/inceptionv4/infer/mxbase/src/InceptionV4.h deleted file mode 100644 index 3655af688..000000000 --- a/official/cv/inceptionv4/infer/mxbase/src/InceptionV4.h +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Copyright (c) 2021. Huawei Technologies Co., Ltd. All rights reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#ifndef MXBASE_INCEPTIONV4_H -#define MXBASE_INCEPTIONV4_H - -#include -#include -#include -#include -#include "MxBase/DvppWrapper/DvppWrapper.h" -#include "MxBase/ModelInfer/ModelInferenceProcessor.h" -#include "MxBase/Tensor/TensorContext/TensorContext.h" -#include "ClassPostProcessors/Resnet50PostProcess.h" - -struct InitParam { - uint32_t deviceId; - std::string labelPath; - uint32_t classNum; - uint32_t topk; - bool softmax; - bool checkTensor; - std::string modelPath; -}; - -class InceptionV4 { - public: - APP_ERROR Init(const InitParam &initParam); - APP_ERROR DeInit(); - APP_ERROR ReadImage(const std::string &imgPath, cv::Mat *imageMat); - APP_ERROR ResizeImage(const cv::Mat &srcImageMat, cv::Mat *dstImageMat); - APP_ERROR CVMatToTensorBase(const cv::Mat &imageMat, MxBase::TensorBase *tensorBase); - APP_ERROR Inference(const std::vector &inputs, - std::vector *outputs); - APP_ERROR PostProcess(const std::vector &inputs, - std::vector> *clsInfos); - APP_ERROR Process(const std::string 
&imgPath); - // get infer time - double GetInferCostMilliSec() const {return inferCostTimeMilliSec;} - - private: - APP_ERROR SaveResult(const std::string &imgPath, - std::vector> *batchClsInfos); - - private: - std::shared_ptr dvppWrapper_; - std::shared_ptr model_; - std::shared_ptr post_; - MxBase::ModelDesc modelDesc_; - uint32_t deviceId_ = 0; - // infer time - double inferCostTimeMilliSec = 0.0; -}; - -#endif diff --git a/official/cv/inceptionv4/infer/mxbase/src/main.cpp b/official/cv/inceptionv4/infer/mxbase/src/main.cpp deleted file mode 100644 index f03638cd0..000000000 --- a/official/cv/inceptionv4/infer/mxbase/src/main.cpp +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright (c) 2021. Huawei Technologies Co., Ltd. All rights reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#include -#include "InceptionV4.h" -#include "MxBase/Log/Log.h" - -namespace { - const uint32_t CLASS_NUM = 1000; -} // namespace - -APP_ERROR ScanImages(const std::string &path, std::vector *imgFiles) { - DIR *dirPtr = opendir(path.c_str()); - if (dirPtr == nullptr) { - LogError << "opendir failed. dir:" << path; - return APP_ERR_INTERNAL_ERROR; - } - dirent *direntPtr = nullptr; - while ((direntPtr = readdir(dirPtr)) != nullptr) { - std::string fileName = direntPtr->d_name; - if (fileName == "." 
|| fileName == "..") { - continue; - } - - imgFiles->push_back(path + "/" + fileName); - } - closedir(dirPtr); - return APP_ERR_OK; -} - -int main(int argc, char* argv[]) { - if (argc <= 1) { - LogWarn << "Please input image path, such as './inceptionv4 image_dir'."; - return APP_ERR_OK; - } - - InitParam initParam = {}; - initParam.deviceId = 0; - initParam.classNum = CLASS_NUM; - initParam.labelPath = "../data/config/imagenet1000_clsidx_to_labels.names"; - initParam.topk = 5; - initParam.softmax = false; - initParam.checkTensor = true; - initParam.modelPath = "../data/models/inceptionv4_dvpp.om"; - auto inferInceptionv4 = std::make_shared(); - APP_ERROR ret = inferInceptionv4->Init(initParam); - if (ret != APP_ERR_OK) { - LogError << "InceptionV4Classify init failed, ret=" << ret << "."; - return ret; - } - - std::string imgPath = argv[1]; - std::vector imgFilePaths; - ret = ScanImages(imgPath, &imgFilePaths); - if (ret != APP_ERR_OK) { - return ret; - } - for (auto &imgFile : imgFilePaths) { - ret = inferInceptionv4->Process(imgFile); - if (ret != APP_ERR_OK) { - LogError << "InceptionV4Classify process failed, ret=" << ret << "."; - inferInceptionv4->DeInit(); - return ret; - } - } - inferInceptionv4->DeInit(); - double fps = 1000.0 * imgFilePaths.size() / inferInceptionv4->GetInferCostMilliSec(); - LogInfo << " ms\tfps: " << fps << " imgs/sec"; - return APP_ERR_OK; -} diff --git a/official/cv/inceptionv4/infer/sdk/models/inceptionv4/inceptionv4.cfg b/official/cv/inceptionv4/infer/sdk/models/inceptionv4/inceptionv4.cfg deleted file mode 100644 index 581fc76d3..000000000 --- a/official/cv/inceptionv4/infer/sdk/models/inceptionv4/inceptionv4.cfg +++ /dev/null @@ -1,3 +0,0 @@ -CLASS_NUM=1000 -SOFTMAX=false -TOP_K=5 diff --git a/official/cv/inceptionv4/infer/sdk/pipeline/InceptionV4.pipeline b/official/cv/inceptionv4/infer/sdk/pipeline/InceptionV4.pipeline deleted file mode 100644 index 1f185b5fb..000000000 --- 
a/official/cv/inceptionv4/infer/sdk/pipeline/InceptionV4.pipeline +++ /dev/null @@ -1,64 +0,0 @@ -{ - "im_inceptionv4": { - "stream_config": { - "deviceId": "0" - }, - "appsrc1": { - "props": { - "blocksize": "409600" - }, - "factory": "appsrc", - "next": "mxpi_imagedecoder0" - }, - "mxpi_imagedecoder0": { - "props": { - "handleMethod": "opencv" - }, - "factory": "mxpi_imagedecoder", - "next": "mxpi_imageresize0" - }, - "mxpi_imageresize0": { - "props": { - "handleMethod": "opencv", - "resizeType": "Resizer_Stretch", - "resizeHeight": "299", - "resizeWidth": "299" - }, - "factory": "mxpi_imageresize", - "next": "mxpi_tensorinfer0" - }, - "mxpi_tensorinfer0": { - "props": { - "dataSource": "mxpi_imageresize0", - "modelPath": "./models/inceptionv4/inceptionv4_dvpp.om", - "waitingTime": "2000", - "outputDeviceId": "-1" - }, - "factory": "mxpi_tensorinfer", - "next": "mxpi_classpostprocessor0" - }, - "mxpi_classpostprocessor0": { - "props": { - "dataSource": "mxpi_tensorinfer0", - "postProcessConfigPath": "./models/inceptionv4/inceptionv4.cfg", - "labelPath": "./models/inceptionv4/imagenet1000_clsidx_to_labels.names", - "postProcessLibPath": "../../../lib/modelpostprocessors/libresnet50postprocess.so" - }, - "factory": "mxpi_classpostprocessor", - "next": "mxpi_dataserialize0" - }, - "mxpi_dataserialize0": { - "props": { - "outputDataKeys": "mxpi_classpostprocessor0" - }, - "factory": "mxpi_dataserialize", - "next": "appsink0" - }, - "appsink0": { - "props": { - "blocksize": "4096000" - }, - "factory": "appsink" - } - } -} diff --git a/official/cv/inceptionv4/infer/sdk/python_inceptionv4/main.py b/official/cv/inceptionv4/infer/sdk/python_inceptionv4/main.py deleted file mode 100644 index e98ff5662..000000000 --- a/official/cv/inceptionv4/infer/sdk/python_inceptionv4/main.py +++ /dev/null @@ -1,95 +0,0 @@ -# coding=utf-8 - -""" -Copyright 2020 Huawei Technologies Co., Ltd - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file 
except in compliance with the License. -You may obtain a copy of the License at - -http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -""" - -import datetime -import json -import os -import sys - -from StreamManagerApi import StreamManagerApi -from StreamManagerApi import MxDataInput - -if __name__ == '__main__': - # init stream manager - stream_manager_api = StreamManagerApi() - ret = stream_manager_api.InitManager() - if ret != 0: - print("Failed to init Stream manager, ret=%s" % str(ret)) - exit() - - # create streams by pipeline config file - with open("./pipeline/InceptionV4.pipeline", 'rb') as f: - pipelineStr = f.read() - ret = stream_manager_api.CreateMultipleStreams(pipelineStr) - - if ret != 0: - print("Failed to create Stream, ret=%s" % str(ret)) - exit() - - # Construct the input of the stream - data_input = MxDataInput() - - dir_name = sys.argv[1] - res_dir_name = sys.argv[2] - file_list = os.listdir(dir_name) - if not os.path.exists(res_dir_name): - os.makedirs(res_dir_name) - - for file_name in file_list: - file_path = os.path.join(dir_name, file_name) - if not (file_name.lower().endswith(".jpg") or file_name.lower().endswith(".jpeg")): - continue - - with open(file_path, 'rb') as f: - data_input.data = f.read() - - empty_data = [] - stream_name = b'im_inceptionv4' - in_plugin_id = 0 - unique_id = stream_manager_api.SendData(stream_name, in_plugin_id, data_input) - if unique_id < 0: - print("Failed to send data to stream.") - exit() - # Obtain the inference result by specifying streamName and uniqueId. 
- start_time = datetime.datetime.now() - infer_result = stream_manager_api.GetResult(stream_name, unique_id) - end_time = datetime.datetime.now() - print('sdk run time: {}'.format((end_time - start_time).microseconds)) - if infer_result.errorCode != 0: - print("GetResultWithUniqueId error. errorCode=%d, errorMsg=%s" % ( - infer_result.errorCode, infer_result.data.decode())) - exit() - # print the infer result - infer_res = infer_result.data.decode() - print("process img: {}, infer result: {}".format(file_name, infer_res)) - - load_dict = json.loads(infer_result.data.decode()) - if load_dict.get('MxpiClass') is None: - dot = file_name.find_last_of(".") - with open(res_dir_name + "/" + file_name.substr(0, dot) + '.txt', 'w') as f_write: - f_write.write("") - continue - res_vec = load_dict.get('MxpiClass') - - with open(res_dir_name + "/" + file_name[:-5] + '_1.txt', 'w') as f_write: - res_list = [str(item.get("classId")) + " " for item in res_vec] - f_write.writelines(res_list) - f_write.write('\n') - - # destroy streams - stream_manager_api.DestroyAllStreams() diff --git a/official/cv/inceptionv4/infer/sdk/python_inceptionv4/run.sh b/official/cv/inceptionv4/infer/sdk/python_inceptionv4/run.sh deleted file mode 100644 index 1148e0473..000000000 --- a/official/cv/inceptionv4/infer/sdk/python_inceptionv4/run.sh +++ /dev/null @@ -1,36 +0,0 @@ -#!/bin/bash - -# Copyright 2020 Huawei Technologies Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-image_path=$1 -result_dir=$2 - -set -e - -CUR_PATH=$(cd "$(dirname "$0")" || { warn "Failed to check path/to/run.sh" ; exit ; } ; pwd) - -# Simple log helper functions -info() { echo -e "\033[1;34m[INFO ][MxStream] $1\033[1;37m" ; } -warn() { echo >&2 -e "\033[1;31m[WARN ][MxStream] $1\033[1;37m" ; } - -export MX_SDK_HOME=${CUR_PATH}/../../.. -export LD_LIBRARY_PATH=${MX_SDK_HOME}/lib:${MX_SDK_HOME}/opensource/lib:${MX_SDK_HOME}/opensource/lib64:/usr/local/Ascend/ascend-toolkit/latest/acllib/lib64:${LD_LIBRARY_PATH} -export GST_PLUGIN_SCANNER=${MX_SDK_HOME}/opensource/libexec/gstreamer-1.0/gst-plugin-scanner -export GST_PLUGIN_PATH=${MX_SDK_HOME}/opensource/lib/gstreamer-1.0:${MX_SDK_HOME}/lib/plugins - -#to set PYTHONPATH, import the StreamManagerApi.py -export PYTHONPATH=$PYTHONPATH:${MX_SDK_HOME}/python - -python3.7 main.py $image_path $result_dir -exit 0 \ No newline at end of file -- Gitee From 4f9d94ec27a5ae04eee65fce8398f10fcf3af97a Mon Sep 17 00:00:00 2001 From: dengqian06 <18987653966@163.com> Date: Sat, 25 Sep 2021 06:30:28 +0000 Subject: [PATCH 3/3] infer and modelarts --- official/cv/inceptionv4/infer/Dockerfile | 7 + .../cv/inceptionv4/infer/convert/aipp.cfg | 14 + .../inceptionv4/infer/convert/convert_om.sh | 46 + .../imagenet1000_clsidx_to_labels.names | 1002 +++++++++++++++++ .../infer/data/config/inceptionv4.cfg | 3 + .../inceptionv4/infer/docker_start_infer.sh | 42 + .../inceptionv4/infer/mxbase/CMakeLists.txt | 56 + official/cv/inceptionv4/infer/mxbase/build.sh | 47 + .../infer/mxbase/src/InceptionV4.cpp | 211 ++++ .../infer/mxbase/src/InceptionV4.h | 68 ++ .../cv/inceptionv4/infer/mxbase/src/main.cpp | 85 ++ .../infer/sdk/classification_task_metric.py | 178 +++ .../infer/sdk/pipeline/InceptionV4.pipeline | 64 ++ .../infer/sdk/python_inceptionv4/main.py | 95 ++ .../infer/sdk/python_inceptionv4/run.sh | 26 + 15 files changed, 1944 insertions(+) create mode 100644 official/cv/inceptionv4/infer/Dockerfile create mode 100644 
official/cv/inceptionv4/infer/convert/aipp.cfg create mode 100644 official/cv/inceptionv4/infer/convert/convert_om.sh create mode 100644 official/cv/inceptionv4/infer/data/config/imagenet1000_clsidx_to_labels.names create mode 100644 official/cv/inceptionv4/infer/data/config/inceptionv4.cfg create mode 100644 official/cv/inceptionv4/infer/docker_start_infer.sh create mode 100644 official/cv/inceptionv4/infer/mxbase/CMakeLists.txt create mode 100644 official/cv/inceptionv4/infer/mxbase/build.sh create mode 100644 official/cv/inceptionv4/infer/mxbase/src/InceptionV4.cpp create mode 100644 official/cv/inceptionv4/infer/mxbase/src/InceptionV4.h create mode 100644 official/cv/inceptionv4/infer/mxbase/src/main.cpp create mode 100644 official/cv/inceptionv4/infer/sdk/classification_task_metric.py create mode 100644 official/cv/inceptionv4/infer/sdk/pipeline/InceptionV4.pipeline create mode 100644 official/cv/inceptionv4/infer/sdk/python_inceptionv4/main.py create mode 100644 official/cv/inceptionv4/infer/sdk/python_inceptionv4/run.sh diff --git a/official/cv/inceptionv4/infer/Dockerfile b/official/cv/inceptionv4/infer/Dockerfile new file mode 100644 index 000000000..4941a834d --- /dev/null +++ b/official/cv/inceptionv4/infer/Dockerfile @@ -0,0 +1,7 @@ +ARG FROM_IMAGE_NAME +FROM ${FROM_IMAGE_NAME} + +RUN ln -s /usr/local/python3.7.5/bin/python3.7 /usr/bin/python + +COPY sdk/requirements.txt . 
+RUN pip3.7 install -r requirements.txt diff --git a/official/cv/inceptionv4/infer/convert/aipp.cfg b/official/cv/inceptionv4/infer/convert/aipp.cfg new file mode 100644 index 000000000..405144743 --- /dev/null +++ b/official/cv/inceptionv4/infer/convert/aipp.cfg @@ -0,0 +1,14 @@ +aipp_op { + aipp_mode: static + input_format : RGB888_U8 + rbuv_swap_switch : true + mean_chn_0 : 0 + mean_chn_1 : 0 + mean_chn_2 : 0 + min_chn_0 : 125.307 + min_chn_1 : 122.961 + min_chn_2 : 113.8575 + var_reci_chn_0 : 0.0171247538316637 + var_reci_chn_1 : 0.0175070028011204 + var_reci_chn_2 : 0.0174291938997821 +} \ No newline at end of file diff --git a/official/cv/inceptionv4/infer/convert/convert_om.sh b/official/cv/inceptionv4/infer/convert/convert_om.sh new file mode 100644 index 000000000..4d3f4af61 --- /dev/null +++ b/official/cv/inceptionv4/infer/convert/convert_om.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ + + +#!/bin/bash + +if [ $# -ne 3 ] +then + echo "Wrong parameter format." 
+ echo "Usage:" + echo " bash $0 [INPUT_AIR_PATH] [AIPP_PATH] [OUTPUT_OM_PATH_NAME]" + echo "Example: " + echo " bash convert_om.sh xxx.air ./aipp.cfg xx" + + exit 1 +fi + +input_air_path=$1 +aipp_cfg_file=$2 +output_om_path=$3 + + +echo "Input AIR file path: ${input_air_path}" +echo "Output OM file path: ${output_om_path}" + +atc --input_format=NCHW \ + --framework=1 \ + --model="${input_air_path}" \ + --input_shape="x:1, 3, 299, 299" \ + --output="${output_om_path}" \ + --insert_op_conf="${aipp_cfg_file}" \ + --soc_version=Ascend310 \ No newline at end of file diff --git a/official/cv/inceptionv4/infer/data/config/imagenet1000_clsidx_to_labels.names b/official/cv/inceptionv4/infer/data/config/imagenet1000_clsidx_to_labels.names new file mode 100644 index 000000000..53c3c328e --- /dev/null +++ b/official/cv/inceptionv4/infer/data/config/imagenet1000_clsidx_to_labels.names @@ -0,0 +1,1002 @@ +# This is modified from https://gist.github.com/yreva/942d3a0ac09ec9e5eb3a +unknown type +tench, Tinca tinca +goldfish, Carassius auratus +great white shark, white shark, man-eater, man-eating shark, Carcharodon carcharias +tiger shark, Galeocerdo cuvieri +hammerhead, hammerhead shark +electric ray, crampfish, numbfish, torpedo +stingray +cock +hen +ostrich, Struthio camelus +brambling, Fringilla montifringilla +goldfinch, Carduelis carduelis +house finch, linnet, Carpodacus mexicanus +junco, snowbird +indigo bunting, indigo finch, indigo bird, Passerina cyanea +robin, American robin, Turdus migratorius +bulbul +jay +magpie +chickadee +water ouzel, dipper +kite +bald eagle, American eagle, Haliaeetus leucocephalus +vulture +great grey owl, great gray owl, Strix nebulosa +European fire salamander, Salamandra salamandra +common newt, Triturus vulgaris +eft +spotted salamander, Ambystoma maculatum +axolotl, mud puppy, Ambystoma mexicanum +bullfrog, Rana catesbeiana +tree frog, tree-frog +tailed frog, bell toad, ribbed toad, tailed toad, Ascaphus trui +loggerhead, loggerhead 
turtle, Caretta caretta +leatherback turtle, leatherback, leathery turtle, Dermochelys coriacea +mud turtle +terrapin +box turtle, box tortoise +banded gecko +common iguana, iguana, Iguana iguana +American chameleon, anole, Anolis carolinensis +whiptail, whiptail lizard +agama +frilled lizard, Chlamydosaurus kingi +alligator lizard +Gila monster, Heloderma suspectum +green lizard, Lacerta viridis +African chameleon, Chamaeleo chamaeleon +Komodo dragon, Komodo lizard, dragon lizard, giant lizard, Varanus komodoensis +African crocodile, Nile crocodile, Crocodylus niloticus +American alligator, Alligator mississipiensis +triceratops +thunder snake, worm snake, Carphophis amoenus +ringneck snake, ring-necked snake, ring snake +hognose snake, puff adder, sand viper +green snake, grass snake +king snake, kingsnake +garter snake, grass snake +water snake +vine snake +night snake, Hypsiglena torquata +boa constrictor, Constrictor constrictor +rock python, rock snake, Python sebae +Indian cobra, Naja naja +green mamba +sea snake +horned viper, cerastes, sand viper, horned asp, Cerastes cornutus +diamondback, diamondback rattlesnake, Crotalus adamanteus +sidewinder, horned rattlesnake, Crotalus cerastes +trilobite +harvestman, daddy longlegs, Phalangium opilio +scorpion +black and gold garden spider, Argiope aurantia +barn spider, Araneus cavaticus +garden spider, Aranea diademata +black widow, Latrodectus mactans +tarantula +wolf spider, hunting spider +tick +centipede +black grouse +ptarmigan +ruffed grouse, partridge, Bonasa umbellus +prairie chicken, prairie grouse, prairie fowl +peacock +quail +partridge +African grey, African gray, Psittacus erithacus +macaw +sulphur-crested cockatoo, Kakatoe galerita, Cacatua galerita +lorikeet +coucal +bee eater +hornbill +hummingbird +jacamar +toucan +drake +red-breasted merganser, Mergus serrator +goose +black swan, Cygnus atratus +tusker +echidna, spiny anteater, anteater +platypus, duckbill, duckbilled platypus, duck-billed 
platypus, Ornithorhynchus anatinus +wallaby, brush kangaroo +koala, koala bear, kangaroo bear, native bear, Phascolarctos cinereus +wombat +jellyfish +sea anemone, anemone +brain coral +flatworm, platyhelminth +nematode, nematode worm, roundworm +conch +snail +slug +sea slug, nudibranch +chiton, coat-of-mail shell, sea cradle, polyplacophore +chambered nautilus, pearly nautilus, nautilus +Dungeness crab, Cancer magister +rock crab, Cancer irroratus +fiddler crab +king crab, Alaska crab, Alaskan king crab, Alaska king crab, Paralithodes camtschatica +American lobster, Northern lobster, Maine lobster, Homarus americanus +spiny lobster, langouste, rock lobster, crawfish, crayfish, sea crawfish +crayfish, crawfish, crawdad, crawdaddy +hermit crab +isopod +white stork, Ciconia ciconia +black stork, Ciconia nigra +spoonbill +flamingo +little blue heron, Egretta caerulea +American egret, great white heron, Egretta albus +bittern +crane +limpkin, Aramus pictus +European gallinule, Porphyrio porphyrio +American coot, marsh hen, mud hen, water hen, Fulica americana +bustard +ruddy turnstone, Arenaria interpres +red-backed sandpiper, dunlin, Erolia alpina +redshank, Tringa totanus +dowitcher +oystercatcher, oyster catcher +pelican +king penguin, Aptenodytes patagonica +albatross, mollymawk +grey whale, gray whale, devilfish, Eschrichtius gibbosus, Eschrichtius robustus +killer whale, killer, orca, grampus, sea wolf, Orcinus orca +dugong, Dugong dugon +sea lion +Chihuahua +Japanese spaniel +Maltese dog, Maltese terrier, Maltese +Pekinese, Pekingese, Peke +Shih-Tzu +Blenheim spaniel +papillon +toy terrier +Rhodesian ridgeback +Afghan hound, Afghan +basset, basset hound +beagle +bloodhound, sleuthhound +bluetick +black-and-tan coonhound +Walker hound, Walker foxhound +English foxhound +redbone +borzoi, Russian wolfhound +Irish wolfhound +Italian greyhound +whippet +Ibizan hound, Ibizan Podenco +Norwegian elkhound, elkhound +otterhound, otter hound +Saluki, gazelle hound 
+Scottish deerhound, deerhound +Weimaraner +Staffordshire bullterrier, Staffordshire bull terrier +American Staffordshire terrier, Staffordshire terrier, American pit bull terrier, pit bull terrier +Bedlington terrier +Border terrier +Kerry blue terrier +Irish terrier +Norfolk terrier +Norwich terrier +Yorkshire terrier +wire-haired fox terrier +Lakeland terrier +Sealyham terrier, Sealyham +Airedale, Airedale terrier +cairn, cairn terrier +Australian terrier +Dandie Dinmont, Dandie Dinmont terrier +Boston bull, Boston terrier +miniature schnauzer +giant schnauzer +standard schnauzer +Scotch terrier, Scottish terrier, Scottie +Tibetan terrier, chrysanthemum dog +silky terrier, Sydney silky +soft-coated wheaten terrier +West Highland white terrier +Lhasa, Lhasa apso +flat-coated retriever +curly-coated retriever +golden retriever +Labrador retriever +Chesapeake Bay retriever +German short-haired pointer +vizsla, Hungarian pointer +English setter +Irish setter, red setter +Gordon setter +Brittany spaniel +clumber, clumber spaniel +English springer, English springer spaniel +Welsh springer spaniel +cocker spaniel, English cocker spaniel, cocker +Sussex spaniel +Irish water spaniel +kuvasz +schipperke +groenendael +malinois +briard +kelpie +komondor +Old English sheepdog, bobtail +Shetland sheepdog, Shetland sheep dog, Shetland +collie +Border collie +Bouvier des Flandres, Bouviers des Flandres +Rottweiler +German shepherd, German shepherd dog, German police dog, alsatian +Doberman, Doberman pinscher +miniature pinscher +Greater Swiss Mountain dog +Bernese mountain dog +Appenzeller +EntleBucher +boxer +bull mastiff +Tibetan mastiff +French bulldog +Great Dane +Saint Bernard, St Bernard +Eskimo dog, husky +malamute, malemute, Alaskan malamute +Siberian husky +dalmatian, coach dog, carriage dog +affenpinscher, monkey pinscher, monkey dog +basenji +pug, pug-dog +Leonberg +Newfoundland, Newfoundland dog +Great Pyrenees +Samoyed, Samoyede +Pomeranian +chow, chow chow 
+keeshond +Brabancon griffon +Pembroke, Pembroke Welsh corgi +Cardigan, Cardigan Welsh corgi +toy poodle +miniature poodle +standard poodle +Mexican hairless +timber wolf, grey wolf, gray wolf, Canis lupus +white wolf, Arctic wolf, Canis lupus tundrarum +red wolf, maned wolf, Canis rufus, Canis niger +coyote, prairie wolf, brush wolf, Canis latrans +dingo, warrigal, warragal, Canis dingo +dhole, Cuon alpinus +African hunting dog, hyena dog, Cape hunting dog, Lycaon pictus +hyena, hyaena +red fox, Vulpes vulpes +kit fox, Vulpes macrotis +Arctic fox, white fox, Alopex lagopus +grey fox, gray fox, Urocyon cinereoargenteus +tabby, tabby cat +tiger cat +Persian cat +Siamese cat, Siamese +Egyptian cat +cougar, puma, catamount, mountain lion, painter, panther, Felis concolor +lynx, catamount +leopard, Panthera pardus +snow leopard, ounce, Panthera uncia +jaguar, panther, Panthera onca, Felis onca +lion, king of beasts, Panthera leo +tiger, Panthera tigris +cheetah, chetah, Acinonyx jubatus +brown bear, bruin, Ursus arctos +American black bear, black bear, Ursus americanus, Euarctos americanus +ice bear, polar bear, Ursus Maritimus, Thalarctos maritimus +sloth bear, Melursus ursinus, Ursus ursinus +mongoose +meerkat, mierkat +tiger beetle +ladybug, ladybeetle, lady beetle, ladybird, ladybird beetle +ground beetle, carabid beetle +long-horned beetle, longicorn, longicorn beetle +leaf beetle, chrysomelid +dung beetle +rhinoceros beetle +weevil +fly +bee +ant, emmet, pismire +grasshopper, hopper +cricket +walking stick, walkingstick, stick insect +cockroach, roach +mantis, mantid +cicada, cicala +leafhopper +lacewing, lacewing fly +dragonfly, darning needle, devil's darning needle, sewing needle, snake feeder, snake doctor, mosquito hawk, skeeter hawk +damselfly +admiral +ringlet, ringlet butterfly +monarch, monarch butterfly, milkweed butterfly, Danaus plexippus +cabbage butterfly +sulphur butterfly, sulfur butterfly +lycaenid, lycaenid butterfly +starfish, sea star +sea 
urchin +sea cucumber, holothurian +wood rabbit, cottontail, cottontail rabbit +hare +Angora, Angora rabbit +hamster +porcupine, hedgehog +fox squirrel, eastern fox squirrel, Sciurus niger +marmot +beaver +guinea pig, Cavia cobaya +sorrel +zebra +hog, pig, grunter, squealer, Sus scrofa +wild boar, boar, Sus scrofa +warthog +hippopotamus, hippo, river horse, Hippopotamus amphibius +ox +water buffalo, water ox, Asiatic buffalo, Bubalus bubalis +bison +ram, tup +bighorn, bighorn sheep, cimarron, Rocky Mountain bighorn, Rocky Mountain sheep, Ovis canadensis +ibex, Capra ibex +hartebeest +impala, Aepyceros melampus +gazelle +Arabian camel, dromedary, Camelus dromedarius +llama +weasel +mink +polecat, fitch, foulmart, foumart, Mustela putorius +black-footed ferret, ferret, Mustela nigripes +otter +skunk, polecat, wood pussy +badger +armadillo +three-toed sloth, ai, Bradypus tridactylus +orangutan, orang, orangutang, Pongo pygmaeus +gorilla, Gorilla gorilla +chimpanzee, chimp, Pan troglodytes +gibbon, Hylobates lar +siamang, Hylobates syndactylus, Symphalangus syndactylus +guenon, guenon monkey +patas, hussar monkey, Erythrocebus patas +baboon +macaque +langur +colobus, colobus monkey +proboscis monkey, Nasalis larvatus +marmoset +capuchin, ringtail, Cebus capucinus +howler monkey, howler +titi, titi monkey +spider monkey, Ateles geoffroyi +squirrel monkey, Saimiri sciureus +Madagascar cat, ring-tailed lemur, Lemur catta +indri, indris, Indri indri, Indri brevicaudatus +Indian elephant, Elephas maximus +African elephant, Loxodonta africana +lesser panda, red panda, panda, bear cat, cat bear, Ailurus fulgens +giant panda, panda, panda bear, coon bear, Ailuropoda melanoleuca +barracouta, snoek +eel +coho, cohoe, coho salmon, blue jack, silver salmon, Oncorhynchus kisutch +rock beauty, Holocanthus tricolor +anemone fish +sturgeon +gar, garfish, garpike, billfish, Lepisosteus osseus +lionfish +puffer, pufferfish, blowfish, globefish +abacus +abaya +academic gown, academic 
robe, judge's robe +accordion, piano accordion, squeeze box +acoustic guitar +aircraft carrier, carrier, flattop, attack aircraft carrier +airliner +airship, dirigible +altar +ambulance +amphibian, amphibious vehicle +analog clock +apiary, bee house +apron +ashcan, trash can, garbage can, wastebin, ash bin, ash-bin, ashbin, dustbin, trash barrel, trash bin +assault rifle, assault gun +backpack, back pack, knapsack, packsack, rucksack, haversack +bakery, bakeshop, bakehouse +balance beam, beam +balloon +ballpoint, ballpoint pen, ballpen, Biro +Band Aid +banjo +bannister, banister, balustrade, balusters, handrail +barbell +barber chair +barbershop +barn +barometer +barrel, cask +barrow, garden cart, lawn cart, wheelbarrow +baseball +basketball +bassinet +bassoon +bathing cap, swimming cap +bath towel +bathtub, bathing tub, bath, tub +beach wagon, station wagon, wagon, estate car, beach waggon, station waggon, waggon +beacon, lighthouse, beacon light, pharos +beaker +bearskin, busby, shako +beer bottle +beer glass +bell cote, bell cot +bib +bicycle-built-for-two, tandem bicycle, tandem +bikini, two-piece +binder, ring-binder +binoculars, field glasses, opera glasses +birdhouse +boathouse +bobsled, bobsleigh, bob +bolo tie, bolo, bola tie, bola +bonnet, poke bonnet +bookcase +bookshop, bookstore, bookstall +bottlecap +bow +bow tie, bow-tie, bowtie +brass, memorial tablet, plaque +brassiere, bra, bandeau +breakwater, groin, groyne, mole, bulwark, seawall, jetty +breastplate, aegis, egis +broom +bucket, pail +buckle +bulletproof vest +bullet train, bullet +butcher shop, meat market +cab, hack, taxi, taxicab +caldron, cauldron +candle, taper, wax light +cannon +canoe +can opener, tin opener +cardigan +car mirror +carousel, carrousel, merry-go-round, roundabout, whirligig +carpenter's kit, tool kit +carton +car wheel +cash machine, cash dispenser, automated teller machine, automatic teller machine, automated teller, automatic teller, ATM +cassette +cassette player +castle 
+catamaran +CD player +cello, violoncello +cellular telephone, cellular phone, cellphone, cell, mobile phone +chain +chainlink fence +chain mail, ring mail, mail, chain armor, chain armour, ring armor, ring armour +chain saw, chainsaw +chest +chiffonier, commode +chime, bell, gong +china cabinet, china closet +Christmas stocking +church, church building +cinema, movie theater, movie theatre, movie house, picture palace +cleaver, meat cleaver, chopper +cliff dwelling +cloak +clog, geta, patten, sabot +cocktail shaker +coffee mug +coffeepot +coil, spiral, volute, whorl, helix +combination lock +computer keyboard, keypad +confectionery, confectionary, candy store +container ship, containership, container vessel +convertible +corkscrew, bottle screw +cornet, horn, trumpet, trump +cowboy boot +cowboy hat, ten-gallon hat +cradle +crane +crash helmet +crate +crib, cot +Crock Pot +croquet ball +crutch +cuirass +dam, dike, dyke +desk +desktop computer +dial telephone, dial phone +diaper, nappy, napkin +digital clock +digital watch +dining table, board +dishrag, dishcloth +dishwasher, dish washer, dishwashing machine +disk brake, disc brake +dock, dockage, docking facility +dogsled, dog sled, dog sleigh +dome +doormat, welcome mat +drilling platform, offshore rig +drum, membranophone, tympan +drumstick +dumbbell +Dutch oven +electric fan, blower +electric guitar +electric locomotive +entertainment center +envelope +espresso maker +face powder +feather boa, boa +file, file cabinet, filing cabinet +fireboat +fire engine, fire truck +fire screen, fireguard +flagpole, flagstaff +flute, transverse flute +folding chair +football helmet +forklift +fountain +fountain pen +four-poster +freight car +French horn, horn +frying pan, frypan, skillet +fur coat +garbage truck, dustcart +gasmask, respirator, gas helmet +gas pump, gasoline pump, petrol pump, island dispenser +goblet +go-kart +golf ball +golfcart, golf cart +gondola +gong, tam-tam +gown +grand piano, grand +greenhouse, 
nursery, glasshouse +grille, radiator grille +grocery store, grocery, food market, market +guillotine +hair slide +hair spray +half track +hammer +hamper +hand blower, blow dryer, blow drier, hair dryer, hair drier +hand-held computer, hand-held microcomputer +handkerchief, hankie, hanky, hankey +hard disc, hard disk, fixed disk +harmonica, mouth organ, harp, mouth harp +harp +harvester, reaper +hatchet +holster +home theater, home theatre +honeycomb +hook, claw +hoopskirt, crinoline +horizontal bar, high bar +horse cart, horse-cart +hourglass +iPod +iron, smoothing iron +jack-o'-lantern +jean, blue jean, denim +jeep, landrover +jersey, T-shirt, tee shirt +jigsaw puzzle +jinrikisha, ricksha, rickshaw +joystick +kimono +knee pad +knot +lab coat, laboratory coat +ladle +lampshade, lamp shade +laptop, laptop computer +lawn mower, mower +lens cap, lens cover +letter opener, paper knife, paperknife +library +lifeboat +lighter, light, igniter, ignitor +limousine, limo +liner, ocean liner +lipstick, lip rouge +Loafer +lotion +loudspeaker, speaker, speaker unit, loudspeaker system, speaker system +loupe, jeweler's loupe +lumbermill, sawmill +magnetic compass +mailbag, postbag +mailbox, letter box +maillot +maillot, tank suit +manhole cover +maraca +marimba, xylophone +mask +matchstick +maypole +maze, labyrinth +measuring cup +medicine chest, medicine cabinet +megalith, megalithic structure +microphone, mike +microwave, microwave oven +military uniform +milk can +minibus +miniskirt, mini +minivan +missile +mitten +mixing bowl +mobile home, manufactured home +Model T +modem +monastery +monitor +moped +mortar +mortarboard +mosque +mosquito net +motor scooter, scooter +mountain bike, all-terrain bike, off-roader +mountain tent +mouse, computer mouse +mousetrap +moving van +muzzle +nail +neck brace +necklace +nipple +notebook, notebook computer +obelisk +oboe, hautboy, hautbois +ocarina, sweet potato +odometer, hodometer, mileometer, milometer +oil filter +organ, pipe organ 
+oscilloscope, scope, cathode-ray oscilloscope, CRO +overskirt +oxcart +oxygen mask +packet +paddle, boat paddle +paddlewheel, paddle wheel +padlock +paintbrush +pajama, pyjama, pj's, jammies +palace +panpipe, pandean pipe, syrinx +paper towel +parachute, chute +parallel bars, bars +park bench +parking meter +passenger car, coach, carriage +patio, terrace +pay-phone, pay-station +pedestal, plinth, footstall +pencil box, pencil case +pencil sharpener +perfume, essence +Petri dish +photocopier +pick, plectrum, plectron +pickelhaube +picket fence, paling +pickup, pickup truck +pier +piggy bank, penny bank +pill bottle +pillow +ping-pong ball +pinwheel +pirate, pirate ship +pitcher, ewer +plane, carpenter's plane, woodworking plane +planetarium +plastic bag +plate rack +plow, plough +plunger, plumber's helper +Polaroid camera, Polaroid Land camera +pole +police van, police wagon, paddy wagon, patrol wagon, wagon, black Maria +poncho +pool table, billiard table, snooker table +pop bottle, soda bottle +pot, flowerpot +potter's wheel +power drill +prayer rug, prayer mat +printer +prison, prison house +projectile, missile +projector +puck, hockey puck +punching bag, punch bag, punching ball, punchball +purse +quill, quill pen +quilt, comforter, comfort, puff +racer, race car, racing car +racket, racquet +radiator +radio, wireless +radio telescope, radio reflector +rain barrel +recreational vehicle, RV, R.V. 
+reel +reflex camera +refrigerator, icebox +remote control, remote +restaurant, eating house, eating place, eatery +revolver, six-gun, six-shooter +rifle +rocking chair, rocker +rotisserie +rubber eraser, rubber, pencil eraser +rugby ball +rule, ruler +running shoe +safe +safety pin +saltshaker, salt shaker +sandal +sarong +sax, saxophone +scabbard +scale, weighing machine +school bus +schooner +scoreboard +screen, CRT screen +screw +screwdriver +seat belt, seatbelt +sewing machine +shield, buckler +shoe shop, shoe-shop, shoe store +shoji +shopping basket +shopping cart +shovel +shower cap +shower curtain +ski +ski mask +sleeping bag +slide rule, slipstick +sliding door +slot, one-armed bandit +snorkel +snowmobile +snowplow, snowplough +soap dispenser +soccer ball +sock +solar dish, solar collector, solar furnace +sombrero +soup bowl +space bar +space heater +space shuttle +spatula +speedboat +spider web, spider's web +spindle +sports car, sport car +spotlight, spot +stage +steam locomotive +steel arch bridge +steel drum +stethoscope +stole +stone wall +stopwatch, stop watch +stove +strainer +streetcar, tram, tramcar, trolley, trolley car +stretcher +studio couch, day bed +stupa, tope +submarine, pigboat, sub, U-boat +suit, suit of clothes +sundial +sunglass +sunglasses, dark glasses, shades +sunscreen, sunblock, sun blocker +suspension bridge +swab, swob, mop +sweatshirt +swimming trunks, bathing trunks +swing +switch, electric switch, electrical switch +syringe +table lamp +tank, army tank, armored combat vehicle, armoured combat vehicle +tape player +teapot +teddy, teddy bear +television, television system +tennis ball +thatch, thatched roof +theater curtain, theatre curtain +thimble +thresher, thrasher, threshing machine +throne +tile roof +toaster +tobacco shop, tobacconist shop, tobacconist +toilet seat +torch +totem pole +tow truck, tow car, wrecker +toyshop +tractor +trailer truck, tractor trailer, trucking rig, rig, articulated lorry, semi +tray +trench 
coat +tricycle, trike, velocipede +trimaran +tripod +triumphal arch +trolleybus, trolley coach, trackless trolley +trombone +tub, vat +turnstile +typewriter keyboard +umbrella +unicycle, monocycle +upright, upright piano +vacuum, vacuum cleaner +vase +vault +velvet +vending machine +vestment +viaduct +violin, fiddle +volleyball +waffle iron +wall clock +wallet, billfold, notecase, pocketbook +wardrobe, closet, press +warplane, military plane +washbasin, handbasin, washbowl, lavabo, wash-hand basin +washer, automatic washer, washing machine +water bottle +water jug +water tower +whiskey jug +whistle +wig +window screen +window shade +Windsor tie +wine bottle +wing +wok +wooden spoon +wool, woolen, woollen +worm fence, snake fence, snake-rail fence, Virginia fence +wreck +yawl +yurt +web site, website, internet site, site +comic book +crossword puzzle, crossword +street sign +traffic light, traffic signal, stoplight +book jacket, dust cover, dust jacket, dust wrapper +menu +plate +guacamole +consomme +hot pot, hotpot +trifle +ice cream, icecream +ice lolly, lolly, lollipop, popsicle +French loaf +bagel, beigel +pretzel +cheeseburger +hotdog, hot dog, red hot +mashed potato +head cabbage +broccoli +cauliflower +zucchini, courgette +spaghetti squash +acorn squash +butternut squash +cucumber, cuke +artichoke, globe artichoke +bell pepper +cardoon +mushroom +Granny Smith +strawberry +orange +lemon +fig +pineapple, ananas +banana +jackfruit, jak, jack +custard apple +pomegranate +hay +carbonara +chocolate sauce, chocolate syrup +dough +meat loaf, meatloaf +pizza, pizza pie +potpie +burrito +red wine +espresso +cup +eggnog +alp +bubble +cliff, drop, drop-off +coral reef +geyser +lakeside, lakeshore +promontory, headland, head, foreland +sandbar, sand bar +seashore, coast, seacoast, sea-coast +valley, vale +volcano +ballplayer, baseball player +groom, bridegroom +scuba diver +rapeseed +daisy +yellow lady's slipper, yellow lady-slipper, Cypripedium calceolus, Cypripedium 
parviflorum +corn +acorn +hip, rose hip, rosehip +buckeye, horse chestnut, conker +coral fungus +agaric +gyromitra +stinkhorn, carrion fungus +earthstar +hen-of-the-woods, hen of the woods, Polyporus frondosus, Grifola frondosa +bolete +ear, spike, capitulum +toilet tissue, toilet paper, bathroom tissue diff --git a/official/cv/inceptionv4/infer/data/config/inceptionv4.cfg b/official/cv/inceptionv4/infer/data/config/inceptionv4.cfg new file mode 100644 index 000000000..581fc76d3 --- /dev/null +++ b/official/cv/inceptionv4/infer/data/config/inceptionv4.cfg @@ -0,0 +1,3 @@ +CLASS_NUM=1000 +SOFTMAX=false +TOP_K=5 diff --git a/official/cv/inceptionv4/infer/docker_start_infer.sh b/official/cv/inceptionv4/infer/docker_start_infer.sh new file mode 100644 index 000000000..f501a561c --- /dev/null +++ b/official/cv/inceptionv4/infer/docker_start_infer.sh @@ -0,0 +1,42 @@ +#!/bin/bash + +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +docker_image=$1 +share_dir=$2 +echo "$1" +echo "$2" +if [ -z "${docker_image}" ]; then + echo "please input docker_image" + exit 1 +fi + +if [ ! 
-d "${share_dir}" ]; then + echo "please input share directory that contains dataset, models and codes" + exit 1 +fi + + +docker run -it \ + --device=/dev/davinci0 \ + --device=/dev/davinci_manager \ + --device=/dev/devmm_svm \ + --device=/dev/hisi_hdc \ + --privileged \ + -v //usr/local/bin/npu-smi:/usr/local/bin/npu-smi \ + -v /usr/local/Ascend/driver:/usr/local/Ascend/driver \ + -v ${share_dir}:${share_dir} \ + ${docker_image} \ + /bin/bash diff --git a/official/cv/inceptionv4/infer/mxbase/CMakeLists.txt b/official/cv/inceptionv4/infer/mxbase/CMakeLists.txt new file mode 100644 index 000000000..f439835b2 --- /dev/null +++ b/official/cv/inceptionv4/infer/mxbase/CMakeLists.txt @@ -0,0 +1,56 @@ +cmake_minimum_required(VERSION 3.10.0) +project(inceptionv4) + +set(TARGET inceptionv4) + +add_definitions(-DENABLE_DVPP_INTERFACE) +add_definitions(-D_GLIBCXX_USE_CXX11_ABI=0) +add_definitions(-Dgoogle=mindxsdk_private) +add_compile_options(-std=c++11 -fPIE -fstack-protector-all -fPIC -Wall) +add_link_options(-Wl,-z,relro,-z,now,-z,noexecstack -s -pie) + +# Check environment variable +if(NOT DEFINED ENV{ASCEND_HOME}) + message(FATAL_ERROR "please define environment variable:ASCEND_HOME") +endif() +if(NOT DEFINED ENV{ASCEND_VERSION}) + message(WARNING "please define environment variable:ASCEND_VERSION") +endif() +if(NOT DEFINED ENV{ARCH_PATTERN}) + message(WARNING "please define environment variable:ARCH_PATTERN") +endif() + +set(ACL_INC_DIR $ENV{ASCEND_HOME}/$ENV{ASCEND_VERSION}/$ENV{ARCH_PATTERN}/acllib/include) +set(ACL_LIB_DIR $ENV{ASCEND_HOME}/$ENV{ASCEND_VERSION}/$ENV{ARCH_PATTERN}/acllib/lib64) + +set(MXBASE_ROOT_DIR $ENV{MX_SDK_HOME}) +set(MXBASE_INC ${MXBASE_ROOT_DIR}/include) +set(MXBASE_LIB_DIR ${MXBASE_ROOT_DIR}/lib) +set(MXBASE_POST_LIB_DIR ${MXBASE_ROOT_DIR}/lib/modelpostprocessors) +set(MXBASE_POST_PROCESS_DIR ${MXBASE_ROOT_DIR}/include/MxBase/postprocess/include) + +if(DEFINED ENV{MXSDK_OPENSOURCE_DIR}) + set(OPENSOURCE_DIR $ENV{MXSDK_OPENSOURCE_DIR}) 
+else() + set(OPENSOURCE_DIR ${MXBASE_ROOT_DIR}/opensource) +endif() + + +include_directories(${ACL_INC_DIR}) +include_directories(${OPENSOURCE_DIR}/include) +include_directories(${OPENSOURCE_DIR}/include/opencv4) + +include_directories(${MXBASE_INC}) +include_directories(${MXBASE_POST_PROCESS_DIR}) + +link_directories(${ACL_LIB_DIR}) +link_directories(${OPENSOURCE_DIR}/lib) +link_directories(${MXBASE_LIB_DIR}) +link_directories(${MXBASE_POST_LIB_DIR}) + + +add_executable(${TARGET} src/main.cpp src/InceptionV4.cpp) + +target_link_libraries(${TARGET} glog cpprest mxbase resnet50postprocess opencv_world) + +install(TARGETS ${TARGET} RUNTIME DESTINATION ${PROJECT_SOURCE_DIR}/) diff --git a/official/cv/inceptionv4/infer/mxbase/build.sh b/official/cv/inceptionv4/infer/mxbase/build.sh new file mode 100644 index 000000000..76812328b --- /dev/null +++ b/official/cv/inceptionv4/infer/mxbase/build.sh @@ -0,0 +1,47 @@ +#!/bin/bash + +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ + + +# env + +mkdir -p build +cd build || exit + +function make_plugin() { + if ! cmake ..; + then + echo "cmake failed." + return 1 + fi + + if ! (make); + then + echo "make failed." + return 1 + fi + + return 0 +} + +if make_plugin; +then + echo "INFO: Build successfully." +else + echo "ERROR: Build failed." 
+fi + +cd - || exit diff --git a/official/cv/inceptionv4/infer/mxbase/src/InceptionV4.cpp b/official/cv/inceptionv4/infer/mxbase/src/InceptionV4.cpp new file mode 100644 index 000000000..6176f9611 --- /dev/null +++ b/official/cv/inceptionv4/infer/mxbase/src/InceptionV4.cpp @@ -0,0 +1,211 @@ +/* +* Copyright 2021 Huawei Technologies Co., Ltd. All rights reserved. +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +*/ + +#include "InceptionV4.h" +#include +#include +#include +#include +#include "MxBase/DeviceManager/DeviceManager.h" +#include "MxBase/Log/Log.h" + +using namespace MxBase; +namespace { + const uint32_t YUV_BYTE_NU = 3; + const uint32_t YUV_BYTE_DE = 2; + const uint32_t VPC_H_ALIGN = 2; +} + +APP_ERROR InceptionV4::Init(const InitParam &initParam) { + deviceId_ = initParam.deviceId; + APP_ERROR ret = MxBase::DeviceManager::GetInstance()->InitDevices(); + if (ret != APP_ERR_OK) { + LogError << "Init devices failed, ret=" << ret << "."; + return ret; + } + ret = MxBase::TensorContext::GetInstance()->SetContext(initParam.deviceId); + if (ret != APP_ERR_OK) { + LogError << "Set context failed, ret=" << ret << "."; + return ret; + } + dvppWrapper_ = std::make_shared(); + ret = dvppWrapper_->Init(); + if (ret != APP_ERR_OK) { + LogError << "DvppWrapper init failed, ret=" << ret << "."; + return ret; + } + model_ = std::make_shared(); + ret = model_->Init(initParam.modelPath, modelDesc_); + if (ret != APP_ERR_OK) { + LogError << "ModelInferenceProcessor init failed, 
ret=" << ret << "."; + return ret; + } + MxBase::ConfigData configData; + const std::string softmax = initParam.softmax ? "true" : "false"; + const std::string checkTensor = initParam.checkTensor ? "true" : "false"; + configData.SetJsonValue("CLASS_NUM", std::to_string(initParam.classNum)); + configData.SetJsonValue("TOP_K", std::to_string(initParam.topk)); + configData.SetJsonValue("SOFTMAX", softmax); + configData.SetJsonValue("CHECK_MODEL", checkTensor); + auto jsonStr = configData.GetCfgJson().serialize(); + std::map> config; + config["postProcessConfigContent"] = std::make_shared(jsonStr); + config["labelPath"] = std::make_shared(initParam.labelPath); + post_ = std::make_shared(); + ret = post_->Init(config); + if (ret != APP_ERR_OK) { + LogError << "Resnet50PostProcess init failed, ret=" << ret << "."; + return ret; + } + return APP_ERR_OK; +} + +APP_ERROR InceptionV4::DeInit() { + dvppWrapper_->DeInit(); + model_->DeInit(); + post_->DeInit(); + MxBase::DeviceManager::GetInstance()->DestroyDevices(); + return APP_ERR_OK; +} + +void InceptionV4::ReadImage(const std::string &imgPath, cv::Mat *imageMat) { + *imageMat = cv::imread(imgPath, cv::IMREAD_COLOR); +} + +void InceptionV4::ResizeImage(const cv::Mat &srcImageMat, cv::Mat *dstImageMat) { + static constexpr uint32_t resizeHeight = 299; + static constexpr uint32_t resizeWidth = 299; + cv::resize(srcImageMat, *dstImageMat, cv::Size(resizeWidth, resizeHeight)); +} + +APP_ERROR InceptionV4::CVMatToTensorBase(const cv::Mat &imageMat, + MxBase::TensorBase *tensorBase) { + const uint32_t dataSize = imageMat.cols * imageMat.rows * YUV_BYTE_NU; + LogInfo << "image size after resize" << imageMat.cols << " " << imageMat.rows; + MemoryData memoryDataDst(dataSize, MemoryData::MEMORY_DEVICE, deviceId_); + MemoryData memoryDataSrc(imageMat.data, dataSize, MemoryData::MEMORY_HOST_MALLOC); + APP_ERROR ret = MemoryHelper::MxbsMallocAndCopy(memoryDataDst, memoryDataSrc); + if (ret != APP_ERR_OK) { + LogError << GetError(ret) 
<< "Memory malloc failed."; + return ret; + } + std::vector shape = { imageMat.rows * YUV444_RGB_WIDTH_NU, static_cast(imageMat.cols) }; + *tensorBase = TensorBase(memoryDataDst, false, shape, TENSOR_DTYPE_UINT8); + return APP_ERR_OK; +} + +APP_ERROR InceptionV4::Inference(const std::vector &inputs, + std::vector *outputs) { + auto dtypes = model_->GetOutputDataType(); + for (size_t i = 0; i < modelDesc_.outputTensors.size(); ++i) { + std::vector shape = {}; + for (size_t j = 0; j < modelDesc_.outputTensors[i].tensorDims.size(); ++j) { + shape.push_back((uint32_t)modelDesc_.outputTensors[i].tensorDims[j]); + } + TensorBase tensor(shape, dtypes[i], MemoryData::MemoryType::MEMORY_DEVICE, deviceId_); + APP_ERROR ret = TensorBase::TensorBaseMalloc(tensor); + if (ret != APP_ERR_OK) { + LogError << "TensorBaseMalloc failed, ret=" << ret << "."; + return ret; + } + outputs->push_back(tensor); + } + DynamicInfo dynamicInfo = {}; + dynamicInfo.dynamicType = DynamicType::STATIC_BATCH; + auto startTime = std::chrono::high_resolution_clock::now(); + APP_ERROR ret = model_->ModelInference(inputs, *outputs, dynamicInfo); + auto endTime = std::chrono::high_resolution_clock::now(); + double costMs = std::chrono::duration(endTime - startTime).count(); + inferCostTimeMilliSec += costMs; + if (ret != APP_ERR_OK) { + LogError << "ModelInference failed, ret=" << ret << "."; + return ret; + } + return APP_ERR_OK; +} + +APP_ERROR InceptionV4::PostProcess(const std::vector &inputs, + std::vector> *clsInfos) { + APP_ERROR ret = post_->Process(inputs, *clsInfos); + if (ret != APP_ERR_OK) { + LogError << "Process failed, ret=" << ret << "."; + return ret; + } + return APP_ERR_OK; +} + +APP_ERROR InceptionV4::SaveResult(const std::string &imgPath, + std::vector> *batchClsInfos) { + LogInfo << "image path" << imgPath; + std::string fileName = imgPath.substr(imgPath.find_last_of("/") + 1); + size_t dot = fileName.find_last_of("."); + std::string resFileName = "infer_results/" + 
fileName.substr(0, dot) + "_1.txt"; + LogInfo << "file path for saving result" << resFileName; + std::ofstream outfile(resFileName); + if (outfile.fail()) { + LogError << "Failed to open result file: "; + return APP_ERR_COMM_FAILURE; + } + uint32_t batchIndex = 0; + for (auto clsInfos : *batchClsInfos) { + std::string resultStr; + for (auto clsInfo : clsInfos) { + LogDebug << " className:" << clsInfo.className << " confidence:" << clsInfo.confidence << + " classIndex:" << clsInfo.classId; + resultStr += std::to_string(clsInfo.classId) + " "; + } + outfile << resultStr << std::endl; + batchIndex++; + } + outfile.close(); + return APP_ERR_OK; +} + +APP_ERROR InceptionV4::Process(const std::string &imgPath) { + cv::Mat imageMat; + ReadImage(imgPath, &imageMat); + ResizeImage(imageMat, &imageMat); + std::vector inputs = {}; + std::vector outputs = {}; + TensorBase tensorBase; + APP_ERROR ret = CVMatToTensorBase(imageMat, &tensorBase); + if (ret != APP_ERR_OK) { + LogError << "CVMatToTensorBase failed, ret=" << ret << "."; + return ret; + } + inputs.push_back(tensorBase); + auto startTime = std::chrono::high_resolution_clock::now(); + ret = Inference(inputs, &outputs); + auto endTime = std::chrono::high_resolution_clock::now(); + double costMs = std::chrono::duration(endTime - startTime).count(); + inferCostTimeMilliSec += costMs; + if (ret != APP_ERR_OK) { + LogError << "Inference failed, ret=" << ret << "."; + return ret; + } + std::vector> BatchClsInfos = {}; + ret = PostProcess(outputs, &BatchClsInfos); + if (ret != APP_ERR_OK) { + LogError << "PostProcess failed, ret=" << ret << "."; + return ret; + } + ret = SaveResult(imgPath, &BatchClsInfos); + if (ret != APP_ERR_OK) { + LogError << "Save infer results into file failed. 
ret = " << ret << "."; + return ret; + } + return APP_ERR_OK; +} diff --git a/official/cv/inceptionv4/infer/mxbase/src/InceptionV4.h b/official/cv/inceptionv4/infer/mxbase/src/InceptionV4.h new file mode 100644 index 000000000..4988a8024 --- /dev/null +++ b/official/cv/inceptionv4/infer/mxbase/src/InceptionV4.h @@ -0,0 +1,68 @@ +/* + * Copyright 2021 Huawei Technologies Co., Ltd. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef MXBASE_INCEPTIONV4_H +#define MXBASE_INCEPTIONV4_H + +#include +#include +#include +#include +#include "MxBase/DvppWrapper/DvppWrapper.h" +#include "MxBase/ModelInfer/ModelInferenceProcessor.h" +#include "MxBase/Tensor/TensorContext/TensorContext.h" +#include "ClassPostProcessors/Resnet50PostProcess.h" + +struct InitParam { + uint32_t deviceId; + std::string labelPath; + uint32_t classNum; + uint32_t topk; + bool softmax; + bool checkTensor; + std::string modelPath; +}; + +class InceptionV4 { + public: + APP_ERROR Init(const InitParam &initParam); + APP_ERROR DeInit(); + void ReadImage(const std::string &imgPath, cv::Mat *imageMat); + void ResizeImage(const cv::Mat &srcImageMat, cv::Mat *dstImageMat); + APP_ERROR CVMatToTensorBase(const cv::Mat &imageMat, MxBase::TensorBase *tensorBase); + APP_ERROR Inference(const std::vector &inputs, + std::vector *outputs); + APP_ERROR PostProcess(const std::vector &inputs, + std::vector> *clsInfos); + APP_ERROR Process(const std::string &imgPath); + // get 
infer time + double GetInferCostMilliSec() const {return inferCostTimeMilliSec;} + + private: + APP_ERROR SaveResult(const std::string &imgPath, + std::vector> *batchClsInfos); + + private: + std::shared_ptr dvppWrapper_; + std::shared_ptr model_; + std::shared_ptr post_; + MxBase::ModelDesc modelDesc_; + uint32_t deviceId_ = 0; + // infer time + double inferCostTimeMilliSec = 0.0; +}; + +#endif diff --git a/official/cv/inceptionv4/infer/mxbase/src/main.cpp b/official/cv/inceptionv4/infer/mxbase/src/main.cpp new file mode 100644 index 000000000..a69debc1d --- /dev/null +++ b/official/cv/inceptionv4/infer/mxbase/src/main.cpp @@ -0,0 +1,85 @@ +/* + * Copyright 2021 Huawei Technologies Co., Ltd. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include "InceptionV4.h" +#include "MxBase/Log/Log.h" + +namespace { + const uint32_t CLASS_NUM = 1000; +} // namespace + +APP_ERROR ScanImages(const std::string &path, std::vector *imgFiles) { + DIR *dirPtr = opendir(path.c_str()); + if (dirPtr == nullptr) { + LogError << "opendir failed. dir:" << path; + return APP_ERR_INTERNAL_ERROR; + } + dirent *direntPtr = nullptr; + while ((direntPtr = readdir(dirPtr)) != nullptr) { + std::string fileName = direntPtr->d_name; + if (fileName == "." 
|| fileName == "..") { + continue; + } + + imgFiles->push_back(path + "/" + fileName); + } + closedir(dirPtr); + return APP_ERR_OK; +} + +int main(int argc, char* argv[]) { + if (argc <= 1) { + LogWarn << "Please input image path, such as './inceptionv4 image_dir'."; + return APP_ERR_OK; + } + + InitParam initParam = {}; + initParam.deviceId = 0; + initParam.classNum = CLASS_NUM; + initParam.labelPath = "../data/config/imagenet1000_clsidx_to_labels.names"; + initParam.topk = 5; + initParam.softmax = false; + initParam.checkTensor = true; + initParam.modelPath = "../data/models/inceptionV4.om"; + auto inferInceptionv4 = std::make_shared(); + APP_ERROR ret = inferInceptionv4->Init(initParam); + if (ret != APP_ERR_OK) { + inferInceptionv4->DeInit(); + LogError << "InceptionV4Classify init failed, ret=" << ret << "."; + return ret; + } + + std::string imgPath = argv[1]; + std::vector imgFilePaths; + ret = ScanImages(imgPath, &imgFilePaths); + if (ret != APP_ERR_OK) { + inferInceptionv4->DeInit(); + return ret; + } + for (auto &imgFile : imgFilePaths) { + ret = inferInceptionv4->Process(imgFile); + if (ret != APP_ERR_OK) { + LogError << "InceptionV4Classify process failed, ret=" << ret << "."; + inferInceptionv4->DeInit(); + return ret; + } + } + inferInceptionv4->DeInit(); + double fps = 1000.0 * imgFilePaths.size() / inferInceptionv4->GetInferCostMilliSec(); + LogInfo << " ms\tfps: " << fps << " imgs/sec"; + return APP_ERR_OK; +} diff --git a/official/cv/inceptionv4/infer/sdk/classification_task_metric.py b/official/cv/inceptionv4/infer/sdk/classification_task_metric.py new file mode 100644 index 000000000..fc9bc2b45 --- /dev/null +++ b/official/cv/inceptionv4/infer/sdk/classification_task_metric.py @@ -0,0 +1,178 @@ +#coding = utf-8 +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the BSD 3-Clause License (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://opensource.org/licenses/BSD-3-Clause +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import os +import sys +import json +import numpy as np + +np.set_printoptions(threshold=sys.maxsize) + +LABEL_FILE = "HiAI_label.json" + + +def gen_file_name(img_name): + full_name = img_name.split('/')[-1] + return os.path.splitext(full_name) + + +def cre_groundtruth_dict(gtfile_path): + """ + :param filename: file contains the imagename and label number + :return: dictionary key imagename, value is label number + """ + img_gt_dict = {} + for gtfile in os.listdir(gtfile_path): + if gtfile != LABEL_FILE: + with open(os.path.join(gtfile_path, gtfile), 'r') as f: + gt = json.load(f) + ret = gt["image"]["annotations"][0]["category_id"] + img_gt_dict[gen_file_name(gtfile)] = ret + return img_gt_dict + + +def cre_groundtruth_dict_fromtxt(gtfile_path): + """ + :param filename: file contains the imagename and label number + :return: dictionary key imagename, value is label number + """ + img_gt_dict = {} + with open(gtfile_path, 'r')as f: + for line in f.readlines(): + temp = line.strip().split(" ") + img_name = temp[0].split(".")[0] + img_lab = temp[1] + img_gt_dict[img_name] = img_lab + return img_gt_dict + + +def load_statistical_predict_result(filepath): + """ + function: + the prediction esult file data extraction + input: + result file:filepath + output: + n_label:numble of label + data_vec: the probabilitie of prediction in the 1000 + :return: probabilities, numble of label, in_type, color + """ + with open(filepath, 'r')as f: + data = f.readline() + temp = data.strip().split(" ") + n_label = len(temp) + data_vec = np.zeros((n_label), 
dtype=np.float32) + in_type = '' + color = '' + if n_label == 0: + in_type = f.readline() + color = f.readline() + else: + for ind, cls_ind in enumerate(temp): + data_vec[ind] = np.int(cls_ind) + return data_vec, n_label, in_type, color + + +def create_visualization_statistical_result(prediction_file_path, + result_store_path, _json_file_name, + img_gt_dict, topn=5): + """ + :param prediction_file_path: + :param result_store_path: + :param _json_file_name: + :param img_gt_dict: + :param topn: + :return: + """ + writer = open(os.path.join(result_store_path, _json_file_name), 'w') + table_dict = {} + table_dict["title"] = "Overall statistical evaluation" + table_dict["value"] = [] + + count = 0 + res_cnt = 0 + n_labels = "" + count_hit = np.zeros(topn) + for tfile_name in os.listdir(prediction_file_path): + count += 1 + temp = tfile_name.split('.')[0] + index = temp.rfind('_') + img_name = temp[:index] + filepath = os.path.join(prediction_file_path, tfile_name) + + ret = load_statistical_predict_result(filepath) + prediction = ret[0] + n_labels = ret[1] + + gt = img_gt_dict[img_name] + if n_labels == 1000: + real_label = int(gt) + elif n_labels == 1001: + real_label = int(gt) + 1 + else: + real_label = int(gt) + + res_cnt = min(len(prediction), topn) + for i in range(res_cnt): + if str(real_label) == str(int(prediction[i])): + count_hit[i] += 1 + break + if 'value' not in table_dict.keys(): + print("the item value does not exist!") + else: + table_dict["value"].extend( + [{"key": "Number of images", "value": str(count)}, + {"key": "Number of classes", "value": str(n_labels)}]) + if count == 0: + accuracy = 0 + else: + accuracy = np.cumsum(count_hit) / count + for i in range(res_cnt): + table_dict["value"].append({"key": "Top" + str(i + 1) + " accuracy", + "value": str( + round(accuracy[i] * 100, 2)) + '%'}) + json.dump(table_dict, writer) + writer.close() + + +if __name__ == '__main__': + try: + # txt file path + folder_davinci_target = sys.argv[1] + # annotation 
files path, "val_label.txt" + annotation_file_path = sys.argv[2] + # the path to store the results json path + result_json_path = sys.argv[3] + # result json file name + json_file_name = sys.argv[4] + except IndexError: + print("Please enter target file result folder | ground truth label file | result json file folder | " + "result json file name, such as ./result val_label.txt . result.json") + exit(1) + + if not os.path.exists(folder_davinci_target): + print("Target file folder does not exist.") + + if not os.path.exists(annotation_file_path): + print("Ground truth file does not exist.") + + if not os.path.exists(result_json_path): + print("Result folder doesn't exist.") + + img_label_dict = cre_groundtruth_dict_fromtxt(annotation_file_path) + create_visualization_statistical_result(folder_davinci_target, + result_json_path, json_file_name, + img_label_dict, topn=5) \ No newline at end of file diff --git a/official/cv/inceptionv4/infer/sdk/pipeline/InceptionV4.pipeline b/official/cv/inceptionv4/infer/sdk/pipeline/InceptionV4.pipeline new file mode 100644 index 000000000..e1e5f5c24 --- /dev/null +++ b/official/cv/inceptionv4/infer/sdk/pipeline/InceptionV4.pipeline @@ -0,0 +1,64 @@ +{ + "im_inceptionv4": { + "stream_config": { + "deviceId": "0" + }, + "appsrc1": { + "props": { + "blocksize": "409600" + }, + "factory": "appsrc", + "next": "mxpi_imagedecoder0" + }, + "mxpi_imagedecoder0": { + "props": { + "handleMethod": "opencv" + }, + "factory": "mxpi_imagedecoder", + "next": "mxpi_imageresize0" + }, + "mxpi_imageresize0": { + "props": { + "handleMethod": "opencv", + "resizeType": "Resizer_Stretch", + "resizeHeight": "299", + "resizeWidth": "299" + }, + "factory": "mxpi_imageresize", + "next": "mxpi_tensorinfer0" + }, + "mxpi_tensorinfer0": { + "props": { + "dataSource": "mxpi_imageresize0", + "modelPath": "../models/inceptionv4/inceptionv4.om", + "waitingTime": "2000", + "outputDeviceId": "-1" + }, + "factory": "mxpi_tensorinfer", + "next": 
"mxpi_classpostprocessor0" + }, + "mxpi_classpostprocessor0": { + "props": { + "dataSource": "mxpi_tensorinfer0", + "postProcessConfigPath": "../models/inceptionv4/inceptionv4.cfg", + "labelPath": "../models/inceptionv4/imagenet1000_clsidx_to_labels.names", + "postProcessLibPath": "libresnet50postprocess.so" + }, + "factory": "mxpi_classpostprocessor", + "next": "mxpi_dataserialize0" + }, + "mxpi_dataserialize0": { + "props": { + "outputDataKeys": "mxpi_classpostprocessor0" + }, + "factory": "mxpi_dataserialize", + "next": "appsink0" + }, + "appsink0": { + "props": { + "blocksize": "4096000" + }, + "factory": "appsink" + } + } +} diff --git a/official/cv/inceptionv4/infer/sdk/python_inceptionv4/main.py b/official/cv/inceptionv4/infer/sdk/python_inceptionv4/main.py new file mode 100644 index 000000000..3a3695b5c --- /dev/null +++ b/official/cv/inceptionv4/infer/sdk/python_inceptionv4/main.py @@ -0,0 +1,95 @@ +# coding=utf-8 + +""" +Copyright 2021 Huawei Technologies Co., Ltd + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+""" + +import datetime +import json +import os +import sys + +from StreamManagerApi import StreamManagerApi +from StreamManagerApi import MxDataInput + +if __name__ == '__main__': + # init stream manager + stream_manager_api = StreamManagerApi() + ret = stream_manager_api.InitManager() + if ret != 0: + print("Failed to init Stream manager, ret=%s" % str(ret)) + exit() + + # create streams by pipeline config file + with open("../pipeline/InceptionV4.pipeline", 'rb') as f: + pipelineStr = f.read() + ret = stream_manager_api.CreateMultipleStreams(pipelineStr) + + if ret != 0: + print("Failed to create Stream, ret=%s" % str(ret)) + exit() + + # Construct the input of the stream + data_input = MxDataInput() + + dir_name = sys.argv[1] + res_dir_name = sys.argv[2] + file_list = os.listdir(dir_name) + if not os.path.exists(res_dir_name): + os.makedirs(res_dir_name) + + for file_name in file_list: + file_path = os.path.join(dir_name, file_name) + if not (file_name.lower().endswith(".jpg") or file_name.lower().endswith(".jpeg")): + continue + + with open(file_path, 'rb') as f: + data_input.data = f.read() + + empty_data = [] + stream_name = b'im_inceptionv4' + in_plugin_id = 0 + unique_id = stream_manager_api.SendData(stream_name, in_plugin_id, data_input) + if unique_id < 0: + print("Failed to send data to stream.") + exit() + # Obtain the inference result by specifying streamName and uniqueId. + start_time = datetime.datetime.now() + infer_result = stream_manager_api.GetResult(stream_name, unique_id) + end_time = datetime.datetime.now() + print('sdk run time: {}'.format((end_time - start_time).microseconds)) + if infer_result.errorCode != 0: + print("GetResultWithUniqueId error. 
errorCode=%d, errorMsg=%s" % (
+                infer_result.errorCode, infer_result.data.decode()))
+            exit()
+        # print the infer result
+        infer_res = infer_result.data.decode()
+        print("process img: {}, infer result: {}".format(file_name, infer_res))
+
+        load_dict = json.loads(infer_result.data.decode())
+        if load_dict.get('MxpiClass') is None:
+            dot = file_name.rfind(".")
+            with open(os.path.join(res_dir_name, '{}.txt'.format(file_name[:dot])), 'w') as f_write:
+                f_write.write("")
+            continue
+        res_vec = load_dict.get('MxpiClass')
+
+        with open(os.path.join(res_dir_name, '{}_1.txt'.format(file_name[:file_name.rfind(".")])), 'w') as f_write:
+            res_list = [str(item.get("classId")) + " " for item in res_vec]
+            f_write.writelines(res_list)
+            f_write.write('\n')
+
+    # destroy streams
+    stream_manager_api.DestroyAllStreams()
diff --git a/official/cv/inceptionv4/infer/sdk/python_inceptionv4/run.sh b/official/cv/inceptionv4/infer/sdk/python_inceptionv4/run.sh
new file mode 100644
index 000000000..855d58a2f
--- /dev/null
+++ b/official/cv/inceptionv4/infer/sdk/python_inceptionv4/run.sh
@@ -0,0 +1,26 @@
+#!/bin/bash
+
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+image_path=$1 +result_dir=$2 + +set -e + +# Simple log helper functions +info() { echo -e "\033[1;34m[INFO ][MxStream] $1\033[1;37m" ; } +warn() { echo >&2 -e "\033[1;31m[WARN ][MxStream] $1\033[1;37m" ; } + +python3.7 main.py $image_path $result_dir +exit 0 \ No newline at end of file -- Gitee